qdkit 0.1.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- qdkit-0.1.0/LICENSE +21 -0
- qdkit-0.1.0/PKG-INFO +41 -0
- qdkit-0.1.0/README.md +3 -0
- qdkit-0.1.0/pyproject.toml +6 -0
- qdkit-0.1.0/setup.cfg +4 -0
- qdkit-0.1.0/setup.py +39 -0
- qdkit-0.1.0/src/fdkit/__init__.py +5 -0
- qdkit-0.1.0/src/fdkit/blp_utils.py +32 -0
- qdkit-0.1.0/src/fdkit/commons.py +54 -0
- qdkit-0.1.0/src/fdkit/db_utils.py +150 -0
- qdkit-0.1.0/src/fdkit/gtja_api.py +106 -0
- qdkit-0.1.0/src/fdkit/ht_insight.py +98 -0
- qdkit-0.1.0/src/fdkit/wind_utils.py +192 -0
- qdkit-0.1.0/src/qdkit.egg-info/PKG-INFO +41 -0
- qdkit-0.1.0/src/qdkit.egg-info/SOURCES.txt +16 -0
- qdkit-0.1.0/src/qdkit.egg-info/dependency_links.txt +1 -0
- qdkit-0.1.0/src/qdkit.egg-info/requires.txt +13 -0
- qdkit-0.1.0/src/qdkit.egg-info/top_level.txt +1 -0
qdkit-0.1.0/LICENSE
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2020 Artipie
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
qdkit-0.1.0/PKG-INFO
ADDED
|
@@ -0,0 +1,41 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: qdkit
|
|
3
|
+
Version: 0.1.0
|
|
4
|
+
Summary: financial data toolkit
|
|
5
|
+
Home-page: https://pypi.org/project/qdkit/
|
|
6
|
+
Author: teleping
|
|
7
|
+
Author-email: teleping@gmail.com
|
|
8
|
+
Project-URL: Bug Tracker, https://pypi.org/project/qdkit/
|
|
9
|
+
Classifier: Programming Language :: Python :: 3
|
|
10
|
+
Classifier: License :: OSI Approved :: MIT License
|
|
11
|
+
Classifier: Operating System :: OS Independent
|
|
12
|
+
Requires-Python: >=3.8
|
|
13
|
+
Description-Content-Type: text/markdown
|
|
14
|
+
License-File: LICENSE
|
|
15
|
+
Requires-Dist: pandas
|
|
16
|
+
Requires-Dist: sqlalchemy
|
|
17
|
+
Requires-Dist: pangres
|
|
18
|
+
Requires-Dist: pymysql
|
|
19
|
+
Requires-Dist: logbook
|
|
20
|
+
Requires-Dist: pyyaml
|
|
21
|
+
Requires-Dist: requests
|
|
22
|
+
Provides-Extra: bloomberg
|
|
23
|
+
Requires-Dist: xbbg; extra == "bloomberg"
|
|
24
|
+
Provides-Extra: wind
|
|
25
|
+
Requires-Dist: WindPy; extra == "wind"
|
|
26
|
+
Dynamic: author
|
|
27
|
+
Dynamic: author-email
|
|
28
|
+
Dynamic: classifier
|
|
29
|
+
Dynamic: description
|
|
30
|
+
Dynamic: description-content-type
|
|
31
|
+
Dynamic: home-page
|
|
32
|
+
Dynamic: license-file
|
|
33
|
+
Dynamic: project-url
|
|
34
|
+
Dynamic: provides-extra
|
|
35
|
+
Dynamic: requires-dist
|
|
36
|
+
Dynamic: requires-python
|
|
37
|
+
Dynamic: summary
|
|
38
|
+
|
|
39
|
+
# qdkit
|
|
40
|
+
|
|
41
|
+
Quant Data Toolkit sdk for python.
|
qdkit-0.1.0/README.md
ADDED
qdkit-0.1.0/setup.cfg
ADDED
qdkit-0.1.0/setup.py
ADDED
|
@@ -0,0 +1,39 @@
|
|
|
1
|
+
# setuptools build script for the qdkit distribution.
import setuptools

# PyPI long description is taken verbatim from the README.
with open("README.md", "r", encoding="utf-8") as fh:
    long_description = fh.read()

setuptools.setup(
    name="qdkit",
    version="0.1.0",
    author="teleping",
    author_email="teleping@gmail.com",
    description="financial data toolkit",
    long_description=long_description,
    long_description_content_type="text/markdown",
    url="https://pypi.org/project/qdkit/",
    project_urls={
        # NOTE(review): points at the PyPI project page, not an issue
        # tracker — confirm this is intentional.
        "Bug Tracker": "https://pypi.org/project/qdkit/",
    },
    classifiers=[
        "Programming Language :: Python :: 3",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
    ],
    # NOTE(review): the distribution is named "qdkit" but the only package
    # under src/ is "fdkit" — confirm the mismatch is intentional.
    package_dir={"": "src"},
    packages=setuptools.find_packages(where="src"),
    python_requires=">=3.8",
    install_requires=[
        "pandas",
        "sqlalchemy",
        "pangres",
        "pymysql",
        "logbook",
        "pyyaml",
        "requests",
    ],
    # Optional vendor-specific backends.
    extras_require={
        "bloomberg": ["xbbg"],
        "wind": ["WindPy"],
    },
)
|
|
@@ -0,0 +1,32 @@
|
|
|
1
|
+
# -*- coding: utf-8 -*-
|
|
2
|
+
# @author: zhangping
|
|
3
|
+
|
|
4
|
+
import datetime as dt
|
|
5
|
+
from xbbg import blp
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
class BlpUtil:
    """
    Thin utility wrapper around the Bloomberg ``xbbg`` API.
    """

    @classmethod
    def bdh(cls, ticker, flds='PX_LAST', start_date='20200101', end_date=None, overrides=None):
        """
        Fetch a historical time series for one ticker.

        ``end_date`` defaults to today; ``overrides`` (dict) is merged into the
        request parameters and may replace the defaults.  Returns a DataFrame
        with a leading ``date`` column, or Bloomberg's raw (empty) result when
        no rows came back.

        Example::

            d = BlpUtil.bdh('600570 CH Equity', flds='PX_LAST', start_date='2021-04-01')
        """
        params = {'tickers': ticker, 'flds': flds, 'start_date': start_date, 'end_date': cls.get_date_str(end_date)}
        # Caller-supplied overrides win over the defaults above.
        if overrides is not None:
            params = {**params, **overrides}
        df = blp.bdh(**params)
        if df is not None and len(df) > 0:
            # xbbg returns (ticker, field) MultiIndex columns; keep this
            # ticker's fields and expose the date index as a normal column.
            df = df[ticker]
            df = df.rename_axis('date').reset_index()
        return df

    @classmethod
    def get_date_str(cls, date, format='%Y%m%d'):
        """Return *date* formatted as a string; ``None`` means today, strings pass through."""
        date = date if date is not None else dt.datetime.today()
        # Bug fix: use isinstance instead of `type(date) == str` so str
        # subclasses are also passed through unchanged.
        date = date if isinstance(date, str) else date.strftime(format)
        return date
|
|
@@ -0,0 +1,54 @@
|
|
|
1
|
+
# -*- coding: utf-8 -*-
"""
Shared module: pandas display setup, YAML config loading and logbook logger.

@Time : 2023/12/28
@Auth : zhangping
"""
import os, base64, yaml, logbook, logbook.more, pandas as pd
# from jinja2 import Environment, FileSystemLoader

# Widen pandas console output and treat +/-inf as missing values.
pd.set_option('display.max_columns', None), pd.set_option('display.max_rows', None)
pd.set_option('display.width', 10000), pd.set_option('mode.use_inf_as_na', True)

# Search the CWD and up to `depth` parent directories for `name`; returns the
# first hit and raises IndexError when nothing is found within `depth` levels.
find_path = lambda name, depth=5: ['../' * i + name for i in range(depth) if os.path.exists('../' * i + name)][0]
# Import-time side effect: loads the nearest config.yaml relative to the CWD.
# NOTE(review): the file handle passed to safe_load is never explicitly closed.
config = yaml.safe_load(open(find_path('config.yaml'), 'r', encoding='utf-8'))


# class FileUtils:
#     @classmethod
#     def image2base64(cls, image_file, with_frefix=False, clear_file=False):
#         with open(image_file, "rb") as file:
#             encoded_string = base64.b64encode(file.read()).decode('utf-8')
#         if clear_file: os.remove(image_file)
#         return f'data:image/png;base64,{encoded_string}' if with_frefix else encoded_string
#
#     @classmethod
#     def render_template(cls, target, context, template_file='template.html', template_root='./'):
#         env = Environment(loader=FileSystemLoader(template_root))
#         template = env.get_template(template_file)
#         with open(target, 'w', encoding='utf-8') as file:
#             file.write(template.render(**context))


def get_logger(level='INFO', std_log=True, file_log=True, file_folder='logs'):
    """Build a logbook Logger writing to stderr and/or a daily-rotated file.

    NOTE(review): *file_folder* must already exist somewhere up the directory
    tree (located via find_path — the makedirs call below is disabled).
    """
    logs_dir = find_path(file_folder)
    # if not os.path.exists(logs_dir): os.makedirs(logs_dir)
    logbook.set_datetime_format('local')
    log = logbook.Logger('log')
    log.handlers = []
    # One-line record format: time, level, file, function, line number, message.
    log_formate = lambda record, handler: \
        f'[{record.time}] [{record.level_name}] [{os.path.split(record.filename)[-1]}] [{record.func_name}] [{record.lineno}] {record.message}'

    if std_log:  # console output
        std_handler = logbook.more.ColorizedStderrHandler(level=level, bubble=True)
        std_handler.formatter = log_formate
        log.handlers.append(std_handler)
    if file_log:  # rotating log file
        file_handler = logbook.TimedRotatingFileHandler(
            os.path.join(logs_dir, '%s.log' % 'log'), level=level, date_format='%Y-%m-%d', bubble=True,
            encoding='utf-8')
        file_handler.formatter = log_formate
        log.handlers.append(file_handler)
    return log


# Module-level shared logger, created at import time.
logger = get_logger()
|
|
@@ -0,0 +1,150 @@
|
|
|
1
|
+
# -*- coding: utf-8 -*-
|
|
2
|
+
"""
|
|
3
|
+
@Date : 2026/3/28
|
|
4
|
+
@Auth : zhangping
|
|
5
|
+
"""
|
|
6
|
+
import pandas as pd
|
|
7
|
+
from sqlalchemy import create_engine, text
|
|
8
|
+
from sqlalchemy.engine import Engine
|
|
9
|
+
from sqlalchemy.orm import Session
|
|
10
|
+
from sqlalchemy.types import VARCHAR, Date, Integer, DECIMAL
|
|
11
|
+
from pangres import upsert
|
|
12
|
+
from .commons import config
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
class DBUtils:
    """SQLAlchemy engine cache plus small execute/append/delete helpers."""

    # Engine cache; get_engine keys it by connection string (see note below).
    _engines = {}

    @classmethod
    def new_engine(cls, name, echo=False) -> Engine:
        """Create a fresh pooled engine for the DSN at config['database'][name]."""
        conn_str = config['database'][name]
        # SQL Server connections are opened in autocommit mode.
        connect_args = {'autocommit': True} if 'mssql' in conn_str else {}
        return create_engine(conn_str, pool_pre_ping=True, pool_size=10, max_overflow=10,
                             pool_recycle=3600, connect_args=connect_args, echo=echo)

    @classmethod
    def get_engine(cls, name) -> Engine:
        """Return (lazily creating) the cached engine for config name *name*.

        NOTE(review): the cache key here is the *connection string*, while
        set_engine keys by *name* — mixing the two APIs can yield duplicate
        entries; confirm which keying is intended.
        """
        if config['database'][name] not in cls._engines.keys():
            cls._engines[config['database'][name]] = cls.new_engine(name, echo=False)
        return cls._engines[config['database'][name]]

    @classmethod
    def set_engine(cls, name, engine) -> Engine:
        """Install *engine* under key *name*, disposing any engine stored there before."""
        existing = cls._engines.get(name)
        if existing is not None:
            existing.dispose()
        cls._engines[name] = engine
        return cls._engines[name]

    @classmethod
    def clear_engine(cls):
        """Dispose every cached engine and empty the cache."""
        for key in cls._engines:
            if cls._engines.get(key) is not None:
                cls._engines.get(key).dispose()
        cls._engines.clear()

    @classmethod
    def execute(cls, engine, sql):
        """Execute one statement (str or SQLAlchemy executable) and commit."""
        with Session(engine) as session:
            session.execute(text(sql) if isinstance(sql, str) else sql)
            session.commit()

    @classmethod
    def append(cls, conn, instances):
        """Persist one ORM instance or a list of instances."""
        if not isinstance(instances, list):
            instances = [instances]
        with Session(conn) as session:
            session.add_all(instances)
            session.commit()

    @classmethod
    def delete(cls, conn, instances):
        """Delete one ORM instance or a list of instances."""
        if not isinstance(instances, list):
            instances = [instances]
        with Session(conn) as session:
            for instance in instances:
                session.delete(instance)
            session.commit()

    @classmethod
    def truncate(cls, conn, table):
        # NOTE(review): the table name is interpolated into raw SQL — pass
        # trusted identifiers only.
        cls.execute(conn, f'truncate {table}')

    @staticmethod
    def _df_types(df: pd.DataFrame):
        """Map DataFrame dtypes to SQL column types by substring match on the
        dtype name (object→VARCHAR(255), float→DECIMAL(20,5), int→Integer,
        date*→Date; later matches overwrite earlier ones)."""
        type_dict = {}
        for i, j in zip(df.columns, df.dtypes):
            if 'object' in str(j): type_dict.update({i: VARCHAR(length=255)})
            if 'float' in str(j): type_dict.update({i: DECIMAL(20, 5)})
            if 'int' in str(j): type_dict.update({i: Integer()})
            if 'date' in str(j): type_dict.update({i: Date()})
        return type_dict
|
|
82
|
+
|
|
83
|
+
|
|
84
|
+
class TableUpdater:
    """Incremental-update helper bound to one table: last-date lookups,
    targeted deletes, plain append and keyed upsert.

    NOTE(review): SQL here is built by string interpolation — table/column
    names and *code* values must come from trusted sources only.
    """

    def __init__(self, table_name: str, db: str = 'db_name', date_column: str = 'date',
                 code_column: str = 'code', engine: Engine = None):
        self.db = db
        self.engine = engine
        self.table = table_name.lower()
        # Date/code columns are optional; pass None to disable the related logic.
        self.date_column = None if date_column is None else date_column.lower()
        self.code_column = None if code_column is None else code_column.lower()

    def get_db(self):
        return self.db

    def get_table(self):
        return self.table

    def get_conn(self):
        """Lazily create (or reuse) the engine for self.db."""
        self.engine = self.engine if self.engine is not None else DBUtils.new_engine(self.db)
        return self.engine

    def _get_code_condition(self, code):
        """SQL fragment restricting a WHERE clause to one code."""
        return f"and {self.code_column}='{code}'"

    def get_last_date(self, conditions='1=1'):
        """Max value of the date column under *conditions* (None for an empty table)."""
        if self.date_column is None: return None
        sql = f'select max({self.date_column}) date_max from {self.table} where {conditions}'
        df = pd.read_sql(sql, self.get_conn())
        last_date = df['date_max'][0] if df['date_max'][0] is not None else None
        return last_date

    def get_last_date_by_code(self, code, conditions='1=1'):
        """Max date for one code under *conditions*."""
        if self.date_column is None: return None
        return self.get_last_date(conditions + ' ' + self._get_code_condition(code))

    def dispose(self):
        """Release the engine's connection pool, if one was created."""
        if self.engine is not None:
            self.engine.dispose()

    def delete_last_date(self):
        """Delete every row carrying the table-wide latest date; returns self."""
        sql = f'delete from {self.table} where {self.date_column}=(select max({self.date_column}) from {self.table})'
        DBUtils.execute(self.get_conn(), sql)
        return self

    def delete_last_date_by_code(self, code):
        """Delete the latest-dated rows of one code; returns self.

        Bug fix: the original statement was malformed ("delete ... from ...
        from ..." with an unterminated subquery).  The max-date subquery is
        wrapped in a derived table so MySQL permits deleting from the same
        table it selects from.
        """
        sql = (f"delete from {self.table} "
               f"where {self.code_column}='{code}' and {self.date_column}=("
               f"select dm from (select max({self.date_column}) dm from {self.table} "
               f"where {self.code_column}='{code}') t)")
        DBUtils.execute(self.get_conn(), sql)
        return self

    def truncate(self):
        """Truncate the table; returns self."""
        DBUtils.truncate(self.get_conn(), self.table)
        return self

    def delete_by_code(self, code, conditions='1=1'):
        """Delete every row of one code under *conditions*; returns self."""
        sql = f'delete from {self.table} where {conditions} {self._get_code_condition(code)}'
        DBUtils.execute(self.get_conn(), sql)
        return self

    def append(self, df):
        """Plain INSERT of *df* (no-op for None/empty); returns self."""
        if df is not None and len(df) > 0:
            df.to_sql(name=self.table, con=self.get_conn(), dtype=DBUtils._df_types(df), if_exists='append',
                      index=False)
        return self

    def upsert(self, df: pd.DataFrame):
        """Keyed upsert via pangres (update on duplicate key); 0 for empty input."""
        if df is None or len(df) == 0: return 0
        return upsert(con=self.get_conn(), df=df, table_name=self.table, if_row_exists='update',
                      dtype=DBUtils._df_types(df), chunksize=1000, create_table=False)
|
|
@@ -0,0 +1,106 @@
|
|
|
1
|
+
# -*- coding: utf-8 -*-
"""
Guotai Junan (GTJA) futures data API wrapper.

@Date : 2026/3/17
@Auth : zhangping
@Desc : GTJA API wrapper
"""
import pandas as pd, datetime as dt, requests, json, re

from .commons import config, logger

# pd.set_option("display.max_columns", None), pd.set_option("display.max_rows", None)
# pd.set_option("display.max_colwidth", None), pd.set_option("mode.use_inf_as_na", True)
# pd.set_option("display.width", None), pd.set_option("display.expand_frame_repr", False)

api_url = 'https://vip.gtjaqh.com:443/api/'

api_futures_contract = api_url + 'unicorn.cloudApi.contractTradeParams.query.do'  # contract trading parameters
api_futures_prices = api_url + 'unicorn.cloudApi.futuresContractPrice.queryByCode.do'  # futures contract prices
api_futures_basis = api_url + 'unicorn.cloudApi.basisData.query.do'  # basis data
api_futures_inventory = api_url + 'unicorn.cloudApi.inventoryData.queryByCode.do'  # inventory data
api_futures_profit = api_url + 'unicorn.cloudApi.processProfitData.queryByCode.do'  # processing-profit data

# ==================== request headers ====================
# Credentials are read from config.yaml at import time.
request_header = {
    'accessKeyId': config['gtja']['access_key_id'],
    'accessKeySecret': config['gtja']['access_key_secret'],
    'Content-Type': 'application/json;charset=utf-8',
}

# ==================== exchange code mapping ====================
# GTJA exchange codes <-> Wind exchange codes
gj_exchanges = ['CFFEX', 'CZCE', 'DCE', 'INE', 'SHFE', 'GFEX']
wd_exchanges = ['CFE', 'CZC', 'DCE', 'INE', 'SHF', 'GFE']
gj_to_wd = dict(zip(gj_exchanges, wd_exchanges))
wd_to_gj = dict(zip(wd_exchanges, gj_exchanges))
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
def _camel_to_snake(name):
|
|
39
|
+
"""驼峰命名转下划线命名"""
|
|
40
|
+
name = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', name)
|
|
41
|
+
return re.sub('([a-z0-9])([A-Z])', r'\1_\2', name).lower()
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
def _get_date_str(date, format='%Y%m%d'):
|
|
45
|
+
"""获取日期字符串,参数为None时默认当天,支持date/datetime,支持整型/字符串如:'20260308'或'2026/03/08'或'2026-03-08'"""
|
|
46
|
+
if date is None: date = dt.datetime.today()
|
|
47
|
+
if isinstance(date, int): date = str(date)
|
|
48
|
+
if isinstance(date, str): date = dt.datetime.strptime(date.replace('/', '').replace('-', ''), '%Y%m%d')
|
|
49
|
+
if isinstance(date, dt.date): date = date.strftime(format)
|
|
50
|
+
return date
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
def _api_query(api_url, params):
    """POST *params* as JSON to *api_url*; return a DataFrame, or None when
    the API reports a non-zero code (logged, not raised).

    NOTE(review): no request timeout and no HTTP status check — a hung
    endpoint would block indefinitely; confirm acceptable.
    """
    result = requests.request('POST', api_url, headers=request_header, data=json.dumps(params)).json()
    if result['code'] != 0:
        logger.error(f"{api_url} | {params} | {result['msg']}")
        return None
    df = pd.DataFrame(result['data'])
    # return df.rename(columns={col: camel_to_snake(col) for col in df.columns})
    return df
|
|
62
|
+
|
|
63
|
+
|
|
64
|
+
# ==================== 数据查询接口 ====================
|
|
65
|
+
|
|
66
|
+
def get_futures_contracts(date=None):
    """Contract trading parameters for every exchange on *date* (default
    today), concatenated into one DataFrame; empty DataFrame when every
    per-exchange query fails."""
    results = []
    for e in gj_exchanges:
        result = _api_query(api_futures_contract, {'tradingDay': _get_date_str(date), 'exchangeCode': e})
        if result is not None: results.append(result)
    return pd.concat(results, ignore_index=True) if results else pd.DataFrame()


def get_futures_prices(date=None):
    """Futures contract prices for *date* across all exchanges."""
    return _api_query(api_futures_prices, {'tradingDay': _get_date_str(date), 'exchangeCode': None})


def get_futures_basis(code=None, start_date=None, end_date=None):
    """Basis data; *code* is a contract code, None queries everything."""
    return _api_query(api_futures_basis, {
        'code': code,
        'startReportDate': _get_date_str(start_date, '%Y-%m-%d'),
        'endReportDate': _get_date_str(end_date, '%Y-%m-%d')
    })


def get_futures_inventory(code, report_date=None, start_date=None, end_date=None):
    """Inventory data for product *code*; *report_date* is currently unused."""
    return _api_query(api_futures_inventory, {
        'code': code,
        # 'reportDate': _get_date_str(report_date, '%Y-%m-%d'),
        'startDataDate': _get_date_str(start_date, '%Y-%m-%d'),
        'endDataDate': _get_date_str(end_date, '%Y-%m-%d')
    })


def get_futures_profit(code, report_date=None, start_date=None, end_date=None):
    """Processing-profit data for product *code*; *report_date* is currently unused."""
    return _api_query(api_futures_profit, {
        'code': code,
        # 'reportDate': _get_date_str(report_date, '%Y-%m-%d'),
        'startDataDate': _get_date_str(start_date, '%Y-%m-%d'),
        'endDataDate': _get_date_str(end_date, '%Y-%m-%d')
    })
|
|
@@ -0,0 +1,98 @@
|
|
|
1
|
+
# -*- coding: utf-8 -*-
|
|
2
|
+
# @author: zhangping
|
|
3
|
+
|
|
4
|
+
import datetime as dt, pandas as pd
|
|
5
|
+
from insight_python.com.insight import common
|
|
6
|
+
from insight_python.com.insight import query
|
|
7
|
+
from insight_python.com.insight.market_service import market_service
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
class HT_Insight():
    """Wrapper around the Huatai Insight (insight_python) market-data SDK.

    Logs in on construction; usable as a context manager (the SDK session is
    closed on exit).
    """

    def __init__(self, user='******', password='******', login_log=False, open_trace=False,
                 open_file_log=False, open_console_log=False):
        self.user = user
        self.password = password
        self.login_log = login_log
        self.open_trace = open_trace
        self.open_file_log = open_file_log
        self.open_console_log = open_console_log
        self.re_login()

    def re_login(self):
        """(Re-)authenticate with the SDK and apply trace/log settings."""
        # print(self.get_version())
        common.login(InsightMarketService(), self.user, self.password, login_log=self.login_log)
        common.config(self.open_trace, self.open_file_log, self.open_console_log)

    @classmethod
    def get_version(cls):
        # Bug fix: was `@staticmethod` with a `cls` parameter, so both
        # `self.get_version()` and `HT_Insight.get_version()` raised TypeError.
        return common.get_version()

    def close(self):
        """Shut the SDK session down."""
        common.fini()

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self.close()
        # NOTE(review): returning True suppresses *every* exception raised
        # inside the with-block — confirm this is intended.
        return True

    @staticmethod
    def _date_range(start_date, end_date, default_start=dt.datetime(2015, 1, 1)):
        """Fill missing endpoints of a date range: default_start .. now()."""
        start_date = default_start if start_date is None else start_date
        end_date = dt.datetime.now() if end_date is None else end_date
        return start_date, end_date

    def get_all_stocks_info(self, start_date: dt.datetime = None, end_date: dt.datetime = None, exchange=None,
                            listing_state='上市交易'):
        """Stock master data filtered by listing date range / exchange / state."""
        start_date, end_date = self._date_range(start_date, end_date, dt.datetime(1989, 1, 1))
        return query.get_all_stocks_info(listing_date=[start_date, end_date], exchange=exchange,
                                         listing_state=listing_state)

    def get_kline(self, codes, start_date: dt.datetime = None, end_date: dt.datetime = None,
                  frequency="daily", fq="none"):
        """K-line bars for one or more codes.

        Bug fix: ``end_date`` previously defaulted to ``dt.datetime.now()``
        evaluated once at import time, freezing the default for the life of
        the process; it is now resolved per call like the sibling methods.
        """
        start_date, end_date = self._date_range(start_date, end_date)
        return query.get_kline(htsc_code=codes, time=[start_date, end_date], frequency=frequency, fq=fq)

    def get_fin_indicator(self, code, start_date: dt.datetime = None, end_date: dt.datetime = None, period='Q4'):
        """Financial indicators for *code* over a report-date range."""
        start_date, end_date = self._date_range(start_date, end_date)
        return query.get_fin_indicator(htsc_code=code, end_date=[start_date, end_date], period=period)

    def get_stock_valuation(self, code, start_date: dt.datetime = None, end_date: dt.datetime = None):
        """Daily valuation metrics for *code*."""
        start_date, end_date = self._date_range(start_date, end_date)
        return query.get_stock_valuation(htsc_code=code, trading_day=[start_date, end_date])

    def get_income_statement(self, code, start_date: dt.datetime = None, end_date: dt.datetime = None, period='Q4'):
        """Income statements for *code* over a report-date range."""
        start_date, end_date = self._date_range(start_date, end_date)
        return query.get_income_statement(htsc_code=code, end_date=[start_date, end_date], period=period)

    def get_balance_sheet(self, code, start_date: dt.datetime = None, end_date: dt.datetime = None, period='Q4'):
        """Balance sheets for *code* over a report-date range."""
        start_date, end_date = self._date_range(start_date, end_date)
        return query.get_balance_sheet(htsc_code=code, end_date=[start_date, end_date], period=period)

    def get_cashflow_statement(self, code, start_date: dt.datetime = None, end_date: dt.datetime = None, period='Q4'):
        """Cash-flow statements for *code* over a report-date range."""
        start_date, end_date = self._date_range(start_date, end_date)
        return query.get_cashflow_statement(htsc_code=code, end_date=[start_date, end_date], period=period)

    def get_new_con_bond(self):
        """Newly issued convertible bonds."""
        return query.get_new_con_bond()

    def get_trading_days(self, start_date: dt.datetime = None, end_date: dt.datetime = None, exchange='XSHG'):
        """Trading calendar of *exchange* as a single-column DataFrame."""
        start_date, end_date = self._date_range(start_date, end_date)
        print(exchange, start_date, end_date)
        return pd.DataFrame(
            query.get_trading_days(exchange=exchange, trading_day=[start_date, end_date], count=None)[1])

    def get_industries(self, name='sw_l1'):
        """Industry classification list (default Shenwan level 1)."""
        return query.get_industries(classified=name)

    def get_index_component(self, code, date):
        """Index constituents of *code* as of *date*."""
        return query.get_index_component(code, None, None, date)
|
|
93
|
+
|
|
94
|
+
|
|
95
|
+
class InsightMarketService(market_service):
    """Callback handler handed to common.login; prints each query response."""

    def on_query_response(self, result):
        for response in iter(result):
            print(response)
|
|
@@ -0,0 +1,192 @@
|
|
|
1
|
+
# -*- coding: utf-8 -*-
|
|
2
|
+
# @author: zhangping
|
|
3
|
+
|
|
4
|
+
import logging
|
|
5
|
+
import pandas as pd
|
|
6
|
+
import datetime as dt
|
|
7
|
+
from WindPy import w
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
def startwind(fn):
    """
    Decorator: ensure the Wind API session is connected before calling *fn*.

    Bug fix: the wrapper now carries functools.wraps so the decorated
    function keeps its name/docstring (previously every wrapped method
    reported as 'wrapper').
    """
    from functools import wraps

    @wraps(fn)
    def wrapper(*args1, **args2):
        # Lazily start the Wind terminal session on first use.
        if not w.isconnected():
            w.start()
        return fn(*args1, **args2)

    return wrapper
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
class WindUtil:
    """
    Helpers around the WindPy (``w``) terminal API.  Query methods return a
    DataFrame (or list for tdays), or None when Wind reports an error.
    """

    @classmethod
    @startwind
    def tdays(cls, start_date, end_date=None):
        """Trading days between start_date and end_date (default today).

        Example::

            l = WindUtil.tdays('2020-03-07')
            l = WindUtil.tdays('2020-03-07', end_date='2020-04-07')
        """
        l = w.tdays(cls.get_date_str(start_date), cls.get_date_str(end_date), "").Data
        return l[0] if l is not None and len(l) > 0 else []

    @classmethod
    @startwind
    def sectors(cls, sectorid, date=None):
        """Sector constituents by Wind sector id.

        Example::

            df = WindUtil.sectors('a001010100000000')                     # all current A shares
            df = WindUtil.sectors('1000010084000000', date='2020-08-28')  # domestic commodity products
        """
        wd = w.wset('sectorconstituent', f'sectorid={sectorid};date={cls.get_date_str(date)}', usedf=True)
        return cls._usedf_result(wd)

    @classmethod
    @startwind
    def sectors_by_code(cls, wind_code, date=None):
        """Sector/index constituents by Wind code.

        Example::

            df = WindUtil.sectors_by_code('APFI.WI', date='2020-08-28')  # Wind agri-products index
            df = WindUtil.sectors_by_code('000300.SH')                   # CSI 300 members
        """
        wd = w.wset('sectorconstituent', f'windcode={wind_code};date={cls.get_date_str(date)}', usedf=True)
        return cls._usedf_result(wd)

    @classmethod
    @startwind
    def fu_contracts(cls, wind_code, start_date=None, end_date=None):
        """Futures contracts of a product.

        Example: df = WindUtil.fu_contracts('A.DCE')
        """
        wd = w.wset('futurecc',
                    f'wind_code={wind_code};startdate={cls.get_date_str(start_date)};enddate={cls.get_date_str(end_date)}',
                    usedf=True)
        return cls._usedf_result(wd)

    @classmethod
    @startwind
    def fu_hiscode(cls, wind_code, trade_date=None):
        """Dominant-contract code of a product, or None when *wind_code*
        already is the dominant contract (or on error).

        Example: code = WindUtil.fu_hiscode('A.DCE')
        """
        wd = w.wss(wind_code, 'trade_hiscode', f'tradeDate={cls.get_date_str(trade_date, "%Y%m%d")}', usedf=True)
        df = cls._usedf_result(wd)
        hiscode = df['TRADE_HISCODE'][0] if df is not None else None
        return hiscode if wind_code != hiscode else None

    @classmethod
    @startwind
    def wsd(cls, codes, fields, start_date=None, end_date=None, options=None):
        """Time series: multi-code single-field or single-code multi-field.

        Example::

            df = WindUtil.wsd('600570.SH', 'open,close,high,low')
            df = WindUtil.wsd('600570.SH', 'open,close,high,low', start_date='2021-04-01', end_date='2021-04-05', options='')
        """
        wd = w.wsd(codes, fields, cls.get_date_str(start_date), cls.get_date_str(end_date), options)
        return cls._series_df(wd)

    @classmethod
    @startwind
    def wset(cls, name, options):
        """Data sets: sector/index constituents, ETF creation baskets, etc.

        Example::

            df = WindUtil.wset('sectorconstituent', 'date=2022-02-18;windcode=801080.SI')
        """
        wd = w.wset(name, options, usedf=True)
        return cls._usedf_result(wd)

    @classmethod
    @startwind
    def wss(cls, codes, fields, options=None):
        """Cross-sectional (snapshot) data.

        Example::

            code = WindUtil.wss('110095.SH,127084.SZ', 'underlyingcode,underlyingname,clause_conversion2_swapshareprice', 'tradeDate=20240711;unit=1;date=20240711')
        """
        wd = w.wss(codes, fields, options=options, usedf=True)
        return cls._usedf_result(wd)

    @classmethod
    @startwind
    def edb(cls, codes, start_date=None, end_date=None, options=None):
        """Macro-economic (EDB) series.

        Example::

            df = WindUtil.edb('S0049582')
            df = WindUtil.edb('S0049582', start_date='2020-01-01', end_date='2021-04-01', options='')
        """
        wd = w.edb(codes, cls.get_date_str(start_date), cls.get_date_str(end_date), options)
        return cls._series_df(wd)

    @classmethod
    @startwind
    def wses(cls, codes, field, start_date=None, end_date=None, options=None):
        """Sector aggregate series.

        Example::

            df = WindUtil.wses('1000015232000000', 'sec_close_avg', start_date='2021-04-01', end_date='2021-04-05', options='')
        """
        wd = w.wses(codes, field, cls.get_date_str(start_date), cls.get_date_str(end_date), options)
        return cls._series_df(wd)

    # ---------------- shared result handling (extracted from the former
    # copy-pasted blocks in sectors/wset/wss and wsd/edb/wses) ----------------

    @staticmethod
    def _usedf_result(wd):
        """Unpack a usedf=True (error_code, DataFrame) pair; log and return None on error."""
        if wd[0] != 0:
            logging.error(f' wind error: {wd[0]}')
            return None
        return wd[1]

    @staticmethod
    def _series_df(wd):
        """Convert a WindData time-series result into a DataFrame with a
        leading 'date' column; log errors and return None on error/empty."""
        if wd.ErrorCode != 0:
            logging.error(f' wind error: {wd.ErrorCode}')
        if wd is None or wd.ErrorCode != 0 or len(wd.Times) == 0 or len(wd.Data[0]) == 0:
            return None
        data = {'date': wd.Times}
        for idx, field in enumerate(wd.Fields):
            data[field] = wd.Data[idx]
        return pd.DataFrame(data)

    @classmethod
    def get_date_str(cls, date, fmt='%Y-%m-%d'):
        """Format *date* (None -> today; strings pass through unchanged)."""
        date = date if date is not None else dt.datetime.today()
        return date if isinstance(date, str) else date.strftime(fmt)
|
@@ -0,0 +1,41 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: qdkit
|
|
3
|
+
Version: 0.1.0
|
|
4
|
+
Summary: financial data toolkit
|
|
5
|
+
Home-page: https://pypi.org/project/qdkit/
|
|
6
|
+
Author: teleping
|
|
7
|
+
Author-email: teleping@gmail.com
|
|
8
|
+
Project-URL: Bug Tracker, https://pypi.org/project/qdkit/
|
|
9
|
+
Classifier: Programming Language :: Python :: 3
|
|
10
|
+
Classifier: License :: OSI Approved :: MIT License
|
|
11
|
+
Classifier: Operating System :: OS Independent
|
|
12
|
+
Requires-Python: >=3.8
|
|
13
|
+
Description-Content-Type: text/markdown
|
|
14
|
+
License-File: LICENSE
|
|
15
|
+
Requires-Dist: pandas
|
|
16
|
+
Requires-Dist: sqlalchemy
|
|
17
|
+
Requires-Dist: pangres
|
|
18
|
+
Requires-Dist: pymysql
|
|
19
|
+
Requires-Dist: logbook
|
|
20
|
+
Requires-Dist: pyyaml
|
|
21
|
+
Requires-Dist: requests
|
|
22
|
+
Provides-Extra: bloomberg
|
|
23
|
+
Requires-Dist: xbbg; extra == "bloomberg"
|
|
24
|
+
Provides-Extra: wind
|
|
25
|
+
Requires-Dist: WindPy; extra == "wind"
|
|
26
|
+
Dynamic: author
|
|
27
|
+
Dynamic: author-email
|
|
28
|
+
Dynamic: classifier
|
|
29
|
+
Dynamic: description
|
|
30
|
+
Dynamic: description-content-type
|
|
31
|
+
Dynamic: home-page
|
|
32
|
+
Dynamic: license-file
|
|
33
|
+
Dynamic: project-url
|
|
34
|
+
Dynamic: provides-extra
|
|
35
|
+
Dynamic: requires-dist
|
|
36
|
+
Dynamic: requires-python
|
|
37
|
+
Dynamic: summary
|
|
38
|
+
|
|
39
|
+
# qdkit
|
|
40
|
+
|
|
41
|
+
Quant Data Toolkit sdk for python.
|
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
LICENSE
|
|
2
|
+
README.md
|
|
3
|
+
pyproject.toml
|
|
4
|
+
setup.py
|
|
5
|
+
src/fdkit/__init__.py
|
|
6
|
+
src/fdkit/blp_utils.py
|
|
7
|
+
src/fdkit/commons.py
|
|
8
|
+
src/fdkit/db_utils.py
|
|
9
|
+
src/fdkit/gtja_api.py
|
|
10
|
+
src/fdkit/ht_insight.py
|
|
11
|
+
src/fdkit/wind_utils.py
|
|
12
|
+
src/qdkit.egg-info/PKG-INFO
|
|
13
|
+
src/qdkit.egg-info/SOURCES.txt
|
|
14
|
+
src/qdkit.egg-info/dependency_links.txt
|
|
15
|
+
src/qdkit.egg-info/requires.txt
|
|
16
|
+
src/qdkit.egg-info/top_level.txt
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
fdkit
|