lidb 1.1.10 (tar.gz)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of lidb might be problematic.
- lidb-1.1.10/PKG-INFO +17 -0
- lidb-1.1.10/README.md +0 -0
- lidb-1.1.10/lidb/__init__.py +30 -0
- lidb-1.1.10/lidb/database.py +234 -0
- lidb-1.1.10/lidb/dataset.py +310 -0
- lidb-1.1.10/lidb/init.py +42 -0
- lidb-1.1.10/lidb/parse.py +107 -0
- lidb-1.1.10/lidb/qdf/__init__.py +32 -0
- lidb-1.1.10/lidb/qdf/errors.py +65 -0
- lidb-1.1.10/lidb/qdf/expr.py +307 -0
- lidb-1.1.10/lidb/qdf/lazy.py +190 -0
- lidb-1.1.10/lidb/qdf/qdf.py +189 -0
- lidb-1.1.10/lidb/qdf/udf/__init__.py +14 -0
- lidb-1.1.10/lidb/qdf/udf/base_udf.py +146 -0
- lidb-1.1.10/lidb/qdf/udf/cs_udf.py +115 -0
- lidb-1.1.10/lidb/qdf/udf/d_udf.py +183 -0
- lidb-1.1.10/lidb/qdf/udf/itd_udf.py +209 -0
- lidb-1.1.10/lidb/qdf/udf/ts_udf.py +182 -0
- lidb-1.1.10/lidb/svc/__init__.py +6 -0
- lidb-1.1.10/lidb/svc/data.py +138 -0
- lidb-1.1.10/lidb/table.py +129 -0
- lidb-1.1.10/lidb.egg-info/PKG-INFO +17 -0
- lidb-1.1.10/lidb.egg-info/SOURCES.txt +28 -0
- lidb-1.1.10/lidb.egg-info/dependency_links.txt +1 -0
- lidb-1.1.10/lidb.egg-info/requires.txt +11 -0
- lidb-1.1.10/lidb.egg-info/top_level.txt +1 -0
- lidb-1.1.10/pyproject.toml +28 -0
- lidb-1.1.10/setup.cfg +4 -0
- lidb-1.1.10/tests/test_conf.py +14 -0
- lidb-1.1.10/tests/test_parse.py +18 -0
lidb-1.1.10/PKG-INFO
ADDED
@@ -0,0 +1,17 @@
Metadata-Version: 2.4
Name: lidb
Version: 1.1.10
Summary: Light database for quantor
Requires-Python: >=3.12
Description-Content-Type: text/markdown
Requires-Dist: dynaconf>=3.2.11
Requires-Dist: polars>=1.31.0
Requires-Dist: sqlparse>=0.5.3
Requires-Dist: logair>=1.0.1
Requires-Dist: clickhouse-df>=0.1.5
Requires-Dist: connectorx>=0.4.3
Requires-Dist: pymysql>=1.1.2
Requires-Dist: xcals>=0.0.4
Requires-Dist: ygo>=1.2.6
Requires-Dist: lark>=1.3.1
Requires-Dist: numpy>=2.3.1
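The metadata above can be inspected at runtime once the release is installed; a minimal sketch, assuming the package has been installed into the current environment:

from importlib.metadata import requires, version

print(version("lidb"))     # expected to print "1.1.10"
for req in requires("lidb") or []:
    print(req)             # the Requires-Dist entries listed above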
lidb-1.1.10/README.md
ADDED
File without changes
lidb-1.1.10/lidb/__init__.py
ADDED
@@ -0,0 +1,30 @@
# Copyright (c) ZhangYundi.
# Licensed under the MIT License.

from .init import (
    NAME,
    DB_PATH,
    CONFIG_PATH,
    get_settings,
)

from .database import (
    sql,
    put,
    has,
    tb_path,
    read_mysql,
    write_mysql,
    execute_mysql,
    read_ck,
    scan,
)

from .table import Table, TableMode
from .dataset import Dataset, load_ds
from .qdf import to_lazy, Expr
from .svc import DataService, D

from .parse import parse_hive_partition_structure

__version__ = "1.1.10"
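As a quick orientation, the re-exported surface above can be exercised like this (a sketch; it only prints the module-level constants defined in lidb/init.py):

import lidb

print(lidb.__version__)                            # "1.1.10"
print(lidb.NAME, lidb.DB_PATH, lidb.CONFIG_PATH)   # package name, local store root, settings file
# Main entry points re-exported here: lidb.put, lidb.sql, lidb.scan,
# lidb.Dataset, lidb.load_ds, lidb.to_lazy, lidb.D.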
lidb-1.1.10/lidb/database.py
ADDED
@@ -0,0 +1,234 @@
# -*- coding: utf-8 -*-
"""
---------------------------------------------
Copyright (c) 2025 ZhangYundi
Licensed under the MIT License.
Created on 2024/7/1 09:44
Email: yundi.xxii@outlook.com
---------------------------------------------
"""
import re
from pathlib import Path
from typing import Literal

import pymysql

from .parse import extract_table_names_from_sql
from .init import DB_PATH, logger, get_settings
import urllib.parse
import polars as pl

# ======================== Local database (catdb) ========================
def tb_path(tb_name: str) -> Path:
    """
    Return the full local path for the given table name.

    Parameters
    ----------
    tb_name: str
        Table name, written as a path: a/b/c

    Returns
    -------
    pathlib.Path
        full_abs_path: pathlib.Path
        Full local absolute path: $DB_PATH/a/b/c
    """
    return Path(DB_PATH, tb_name)


def put(df, tb_name: str, partitions: list[str] | None = None):
    """
    Write a DataFrame into the directory of the named table, with optional partitioning.

    The given DataFrame (df) is written to the local file system under the table name
    (tb_name). If partition columns (partitions) are provided, the data is split by those
    columns. Missing directories are created automatically.

    Parameters
    ----------
    df: polars.DataFrame
    tb_name: str
        Table name; determines the directory the data is stored in.
    partitions: list[str] | None
        Column names to partition by. If not provided, no partitioning is done.

    Returns
    -------

    """
    if df is None:
        logger.warning("put failed: input data is None.")
        return
    if df.is_empty():
        logger.warning("put failed: input data is empty.")
        return
    tbpath = tb_path(tb_name)
    if not tbpath.exists():
        tbpath.mkdir(parents=True, exist_ok=True)
    if partitions is not None:
        df.write_parquet(tbpath, partition_by=partitions)
    else:
        df.write_parquet(tbpath / "data.parquet")

def has(tb_name: str) -> bool:
    """
    Check whether the given table name exists.

    Parameters
    ----------
    tb_name: str

    Returns
    -------

    """
    return tb_path(tb_name).exists()

def sql(query: str, ):
    """
    SQL query over the local parquet files.

    Parameters
    ----------
    query: str
        SQL statement

    Returns
    -------

    """
    import polars as pl

    tbs = extract_table_names_from_sql(query)
    convertor = dict()
    for tb in tbs:
        db_path = tb_path(tb)
        format_tb = f"read_parquet('{db_path}/**/*.parquet')"
        convertor[tb] = format_tb
    pattern = re.compile("|".join(re.escape(k) for k in convertor.keys()))
    new_query = pattern.sub(lambda m: convertor[m.group(0)], query)
    return pl.sql(new_query)

def scan(tb: str,) -> pl.LazyFrame:
    """polars.scan_parquet"""
    tb = tb_path(tb)
    return pl.scan_parquet(tb)

def read_mysql(query: str, db_conf: str = "DATABASES.mysql"):
    """
    Read data from a MySQL database.

    Parameters
    ----------
    query: str
        Query statement
    db_conf: str
        The matching config section in $DB_PATH/conf/settings.toml

    Returns
    -------
    polars.DataFrame
    """
    import polars as pl
    try:
        db_setting = get_settings().get(db_conf, {})
        required_keys = ['user', 'password', 'url', 'db']
        missing_keys = [key for key in required_keys if key not in db_setting]
        if missing_keys:
            raise KeyError(f"Missing required keys in database config: {missing_keys}")

        user = urllib.parse.quote_plus(db_setting['user'])
        password = urllib.parse.quote_plus(db_setting['password'])
        uri = f"mysql://{user}:{password}@{db_setting['url']}/{db_setting['db']}"
        return pl.read_database_uri(query, uri)

    except KeyError as e:
        raise RuntimeError("Database configuration error: missing required fields.") from e
    except Exception as e:
        raise RuntimeError(f"Failed to execute MySQL query: {e}") from e

def write_mysql(df: pl.DataFrame,
                remote_tb: str,
                db_conf: str,
                if_table_exists: Literal["append", "replace", "fail"] = "append"):
    """Write a polars.DataFrame to MySQL."""
    try:
        db_setting = get_settings().get(db_conf, {})
        required_keys = ['user', 'password', 'url', 'db']
        missing_keys = [key for key in required_keys if key not in db_setting]
        if missing_keys:
            raise KeyError(f"Missing required keys in database config: {missing_keys}")

        user = urllib.parse.quote_plus(db_setting['user'])
        password = urllib.parse.quote_plus(db_setting['password'])
        uri = f"mysql+pymysql://{user}:{password}@{db_setting['url']}/{db_setting['db']}"
        return df.write_database(remote_tb,
                                 connection=uri,
                                 if_table_exists=if_table_exists)

    except KeyError as e:
        raise RuntimeError("Database configuration error: missing required fields.") from e
    except Exception as e:
        raise RuntimeError(f"Failed to write MySQL: {e}") from e

def execute_mysql(sql: str, db_conf: str):
    """Execute a MySQL statement."""
    try:
        db_setting = get_settings().get(db_conf, {})
        required_keys = ['user', 'password', 'url', 'db']
        missing_keys = [key for key in required_keys if key not in db_setting]
        if missing_keys:
            raise KeyError(f"Missing required keys in database config: {missing_keys}")

        # pymysql takes raw credentials; URL-quoting is only needed when building a URI
        user = db_setting['user']
        password = db_setting['password']
        host, port = db_setting['url'].split(":")

    except KeyError as e:
        raise RuntimeError("Database configuration error: missing required fields.") from e
    except Exception as e:
        raise RuntimeError(f"Failed to parse config: {e}") from e

    connection = pymysql.connect(
        host=host,
        port=int(port),
        user=user,
        password=password,
        database=db_setting['db']  # or extract from connection string
    )
    try:
        with connection.cursor() as cursor:
            cursor.execute(sql)
        connection.commit()
    except Exception as e:
        raise RuntimeError(f"Failed to execute MySQL: {e}") from e
    finally:
        connection.close()


def read_ck(query: str, db_conf: str = "DATABASES.ck"):
    """
    Read data from a ClickHouse cluster.

    Parameters
    ----------
    query: str
        Query statement
    db_conf: str
        The matching config section in $DB_PATH/conf/settings.toml

    Returns
    -------
    polars.DataFrame
    """
    import clickhouse_df
    try:
        db_setting = get_settings().get(db_conf, {})
        required_keys = ['user', 'password', 'urls']
        missing_keys = [key for key in required_keys if key not in db_setting]
        if missing_keys:
            raise KeyError(f"Missing required keys in database config: {missing_keys}")

        user = urllib.parse.quote_plus(db_setting['user'])
        password = urllib.parse.quote_plus(db_setting['password'])

        with clickhouse_df.connect(db_setting['urls'], user=user, password=password):
            return clickhouse_df.to_polars(query)

    except KeyError as e:
        raise RuntimeError("Database configuration error: missing required fields.") from e
    except Exception as e:
        raise RuntimeError(f"Failed to execute ClickHouse query: {e}") from e
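A minimal local round trip through put, scan and sql might look like the following sketch; the table name "quotes" and the sample frame are made up for illustration, and sql() relies on extract_table_names_from_sql (defined in lidb/parse.py, not shown here) to spot the table name:

import polars as pl
import lidb

df = pl.DataFrame({
    "date": ["2025-01-02", "2025-01-02"],
    "asset": ["000001", "600000"],
    "close": [10.5, 8.2],
})
lidb.put(df, "quotes", partitions=["date"])        # writes $DB_PATH/quotes/date=.../*.parquet

lazy = lidb.scan("quotes")                         # LazyFrame over the partitioned parquet files
print(lazy.filter(pl.col("asset") == "000001").collect())

# sql() rewrites the bare table name into read_parquet('<path>/**/*.parquet') and runs pl.sql
print(lidb.sql("select asset, close from quotes where date = '2025-01-02'").collect())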
lidb-1.1.10/lidb/dataset.py
ADDED
@@ -0,0 +1,310 @@
# Copyright (c) ZhangYundi.
# Licensed under the MIT License.
# Created on 2025/10/27 14:13
# Description:

from __future__ import annotations

from enum import Enum

from functools import partial
import polars.selectors as cs
import polars as pl
from typing import Callable, Literal
import logair
import xcals
import ygo
from .database import put, tb_path, scan, DB_PATH
from .parse import parse_hive_partition_structure

class InstrumentType(Enum):

    STOCK = "Stock"          # stock
    ETF = "ETF"
    CB = "ConvertibleBond"   # convertible bond

def complete_data(fn, date, save_path, partitions):

    logger = logair.get_logger(__name__)
    try:
        data = fn(date=date)
        if data is None:
            # fn handled saving the data itself
            return
        if not isinstance(data, (pl.DataFrame, pl.LazyFrame)):
            logger.error(f"{save_path}: Result of dataset.fn must be polars.DataFrame or polars.LazyFrame.")
            return
        # Drop columns whose names start with `_`
        data = data.select(~cs.starts_with("_"))
        if isinstance(data, pl.LazyFrame):
            data = data.collect()
        cols = data.columns
        if "date" not in cols:
            data = data.with_columns(pl.lit(date).alias("date")).select("date", *cols)

        put(data, save_path, partitions=partitions)
    except Exception as e:
        logger.error(f"{save_path}: Error when completing data for {date}")
        logger.warning(e)

class Dataset:

    def __init__(self,
                 fn: Callable[..., pl.DataFrame],
                 tb: str,
                 update_time: str = "",
                 partitions: list[str] = None,
                 by_asset: bool = True,
                 by_time: bool = False):
        """

        Parameters
        ----------
        fn: Callable[..., pl.DataFrame]
            Function that computes the dataset.
        tb: str
            Table the dataset is saved to.
        update_time: str
            Update time. Defaults to empty, meaning real-time updates, i.e. the
            current day's value is available.
        partitions: list[str]
            Partition columns.
        by_asset: bool
            Whether to partition by asset; default True.
        by_time: bool
            Whether to partition by time; default False.
        """
        self.fn = fn
        self.fn_params_sig = ygo.fn_signature_params(fn)
        self._by_asset = by_asset
        self._by_time = by_time
        self._append_partitions = ["asset", "date"] if by_asset else ["date", ]
        if by_time:
            self._append_partitions.append("time")
        if partitions is not None:
            partitions = [k for k in partitions if k not in self._append_partitions]
            partitions = [*partitions, *self._append_partitions]
        else:
            partitions = self._append_partitions
        self.partitions = partitions
        self._type_asset = "asset" in self.fn_params_sig
        self.update_time = update_time

        self.tb = tb
        self.save_path = tb_path(tb)
        fn_params = ygo.fn_params(self.fn)
        self.fn_params = {k: v for (k, v) in fn_params}
        self.constraints = dict()
        for k in self.partitions[:-len(self._append_partitions)]:
            if k in self.fn_params:
                v = self.fn_params[k]
                if isinstance(v, (list, tuple)) and not isinstance(v, str):
                    v = sorted(v)
                self.constraints[k] = v
                self.save_path = self.save_path / f"{k}={v}"

    def is_empty(self, path) -> bool:
        return not any(path.rglob("*.parquet"))

    def __call__(self, *fn_args, **fn_kwargs):
        fn = partial(self.fn, *fn_args, **fn_kwargs)
        ds = Dataset(fn=fn,
                     tb=self.tb,
                     partitions=self.partitions,
                     by_asset=self._by_asset,
                     by_time=self._by_time,
                     update_time=self.update_time)
        return ds

    def get_value(self, date, eager: bool = True, **constraints):
        """
        Fetch values; point-in-time correctness is not guaranteed.

        Parameters
        ----------
        date: str
            Date to fetch.
        eager: bool
        constraints: dict
            Filter conditions for the fetch.

        Returns
        -------

        """
        _constraints = {k: v for k, v in constraints.items() if k in self.partitions}
        _limits = {k: v for k, v in constraints.items() if k not in self.partitions}
        search_path = self.save_path
        for k, v in _constraints.items():
            if isinstance(v, (list, tuple)) and not isinstance(v, str):
                v = sorted(v)
            search_path = search_path / f"{k}={v}"
        search_path = search_path / f"date={date}"

        if not self.is_empty(search_path):
            lf = scan(search_path).cast({"date": pl.Utf8})
            schema = lf.collect_schema()
            _limits = {k: v for k, v in constraints.items() if schema.get(k) is not None}
            lf = lf.filter(date=date, **_limits)
            if not eager:
                return lf
            data = lf.collect()
            if not data.is_empty():
                return data
        fn = self.fn
        save_path = self.save_path

        if self._type_asset:
            if "asset" in _constraints:
                fn = ygo.delay(self.fn)(asset=_constraints["asset"])
        if len(self.constraints) < len(self.partitions) - len(self._append_partitions):
            # Partition fields not fixed in the Dataset definition must be supplied via get_value
            params = dict()
            for k in self.partitions[:-len(self._append_partitions)]:
                if k not in self.constraints:
                    v = constraints[k]
                    params[k] = v
                    save_path = save_path / f"{k}={v}"
            fn = ygo.delay(self.fn)(**params)
        logger = logair.get_logger(__name__)

        today = xcals.today()
        now = xcals.now()
        if (date > today) or (date == today and now < self.update_time):
            logger.warning(f"{self.tb}: {date} is not ready, waiting for {self.update_time}")
            return
        complete_data(fn, date, save_path, self._append_partitions)

        lf = scan(search_path).cast({"date": pl.Utf8})
        schema = lf.collect_schema()
        _limits = {k: v for k, v in constraints.items() if schema.get(k) is not None}
        lf = lf.filter(date=date, **_limits)
        if not eager:
            return lf
        return lf.collect()

    def get_pit(self, date: str, query_time: str, eager: bool = True, **constraints):
        """Point-in-time fetch: if the query time is earlier than the update time, return the previous trading day's value."""
        if not self.update_time:
            return self.get_value(date, **constraints)
        val_date = date
        if query_time < self.update_time:
            val_date = xcals.shift_tradeday(date, -1)
        return self.get_value(val_date, eager=eager, **constraints).with_columns(date=pl.lit(date),)

    def get_history(self,
                    dateList: list[str],
                    n_jobs: int = 5,
                    backend: Literal["threading", "multiprocessing", "loky"] = "loky",
                    eager: bool = True,
                    rep_asset: str = "000001",  # default: 000001
                    **constraints):
        """Fetch historical values; point-in-time correctness is not guaranteed."""
        _constraints = {k: v for k, v in constraints.items() if k in self.partitions}
        search_path = self.save_path
        for k, v in _constraints.items():
            if isinstance(v, (list, tuple)) and not isinstance(v, str):
                v = sorted(v)
            search_path = search_path / f"{k}={v}"
        if self.is_empty(search_path):
            # All dates need to be completed
            missing_dates = dateList
        else:
            if not self._type_asset:
                _search_path = self.save_path
                for k, v in _constraints.items():
                    if k != "asset":
                        _search_path = _search_path / f"{k}={v}"
                    else:
                        _search_path = _search_path / f"asset={rep_asset}"
                hive_info = parse_hive_partition_structure(_search_path)
            else:
                hive_info = parse_hive_partition_structure(search_path)
            exist_dates = hive_info["date"].to_list()
            missing_dates = set(dateList).difference(set(exist_dates))
            missing_dates = sorted(list(missing_dates))
        if missing_dates:
            fn = self.fn
            save_path = self.save_path

            if self._type_asset:
                if "asset" in _constraints:
                    fn = ygo.delay(self.fn)(asset=_constraints["asset"])

            if len(self.constraints) < len(self.partitions) - len(self._append_partitions):
                params = dict()
                for k in self.partitions[:-len(self._append_partitions)]:
                    if k not in self.constraints:
                        v = constraints[k]
                        params[k] = v
                        save_path = save_path / f"{k}={v}"
                fn = ygo.delay(self.fn)(**params)

            with ygo.pool(n_jobs=n_jobs, backend=backend) as go:
                info_path = self.save_path
                try:
                    info_path = info_path.relative_to(DB_PATH)
                except ValueError:
                    pass
                for date in missing_dates:
                    go.submit(complete_data, job_name=f"Completing {info_path}")(
                        fn=fn,
                        date=date,
                        save_path=save_path,
                        partitions=self._append_partitions,
                    )
                go.do()
        data = scan(search_path, ).cast({"date": pl.Utf8}).filter(pl.col("date").is_in(dateList), **constraints)
        data = data.sort("date")
        if eager:
            return data.collect()
        return data

def loader(ds: Dataset,
           date_list: list[str],
           prev_date_list: list[str],
           prev_date_mapping: dict[str, str],
           time: str,
           **constraints) -> pl.LazyFrame:
    if time < ds.update_time:
        lf = ds.get_history(prev_date_list, eager=False, **constraints)
    else:
        lf = ds.get_history(date_list, eager=False, **constraints)
    schema = lf.collect_schema()
    include_time = schema.get("time") is not None
    if include_time:
        lf = lf.filter(time=time)
    else:
        lf = lf.with_columns(time=pl.lit(time))
    if time < ds.update_time:
        lf = lf.with_columns(date=pl.col("date").replace(prev_date_mapping))
    return lf


def load_ds(ds_list: list[Dataset],
            beg_date: str,
            end_date: str,
            time: str,
            n_jobs: int = 7,
            backend: Literal["threading", "multiprocessing", "loky"] = "threading",
            **constraints) -> pl.DataFrame:
    """Load datasets with time alignment."""
    lfList = list()
    date_list = xcals.get_tradingdays(beg_date, end_date)
    beg_date, end_date = date_list[0], date_list[-1]
    prev_date_list = xcals.get_tradingdays(xcals.shift_tradeday(beg_date, -1), xcals.shift_tradeday(end_date, -1))
    prev_date_mapping = {prev_date: date_list[i] for i, prev_date in enumerate(prev_date_list)}
    with ygo.pool(n_jobs=n_jobs, backend=backend) as go:
        for ds in ds_list:
            go.submit(loader)(ds=ds,
                              date_list=date_list,
                              prev_date_list=prev_date_list,
                              prev_date_mapping=prev_date_mapping,
                              time=time,
                              **constraints)
        for lf in go.do():
            lfList.append(lf)
    index = ("date", "time", "asset")
    LF = pl.concat(lfList, how="align")
    LF = LF.sort("date", "time", "asset")
    return LF.select(*index, cs.exclude(index)).collect()
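To illustrate how Dataset ties fn, partitions and the local store together, here is a sketch under assumptions: fetch_close is a made-up computation function, the dates are arbitrary past trading days, and none of these names are part of the package itself:

import polars as pl
import lidb

def fetch_close(date: str) -> pl.DataFrame:
    # Made-up per-day computation; a real fn would query or derive the data.
    return pl.DataFrame({"asset": ["000001", "600000"], "close": [10.5, 8.2]})

close_ds = lidb.Dataset(fn=fetch_close,
                        tb="features/close",
                        update_time="16:00:00",   # values for a day become available after 16:00
                        by_asset=True)

day = close_ds.get_value("2025-01-02")            # completes and caches the date partition if missing
hist = close_ds.get_history(["2025-01-02", "2025-01-03"], n_jobs=2, backend="threading")
panel = lidb.load_ds([close_ds], "2025-01-02", "2025-01-03", time="10:00:00")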
lidb-1.1.10/lidb/init.py
ADDED
@@ -0,0 +1,42 @@
# Copyright (c) ZhangYundi.
# Licensed under the MIT License.
# Created on 2025/7/17 14:40
# Description:

from pathlib import Path
from dynaconf import Dynaconf
import logair


USERHOME = Path("~").expanduser()  # user home directory
NAME = "lidb"
DB_PATH = USERHOME / NAME
CONFIG_PATH = USERHOME / ".config" / NAME / "settings.toml"

logger = logair.get_logger(NAME)


if not CONFIG_PATH.exists():
    try:
        CONFIG_PATH.parent.mkdir(parents=True, exist_ok=True)
    except Exception as e:
        logger.error(f"Failed to create settings file: {e}")
    template_content = f'[global]\npath="{DB_PATH}"\n'
    with open(CONFIG_PATH, "w") as f:
        f.write(template_content)
    logger.info(f"Settings file created: {CONFIG_PATH}")

def get_settings():
    try:
        return Dynaconf(settings_files=[CONFIG_PATH])
    except Exception as e:
        logger.error(f"Read settings file failed: {e}")
        return {}

# Read the settings file to override the defaults
_settings = get_settings()
if _settings is not None:
    setting_db_path = _settings.get("global.path", "")
    if setting_db_path:
        DB_PATH = Path(setting_db_path)
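A sketch of how the settings override above is consumed; it mirrors the lookup the module performs at import time, and the printed values depend on your own ~/.config/lidb/settings.toml:

import lidb

settings = lidb.get_settings()              # Dynaconf loaded from CONFIG_PATH
print(settings.get("global.path", ""))      # "path" under [global], e.g. the template default
print(lidb.DB_PATH)                         # resolved store root used by tb_path/put/scan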