vnpy_questdb 1.0.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,218 @@
1
+ # Byte-compiled / optimized / DLL files
2
+ __pycache__/
3
+ *.py[codz]
4
+ *$py.class
5
+
6
+ # C extensions
7
+ *.so
8
+
9
+ # Distribution / packaging
10
+ .Python
11
+ build/
12
+ develop-eggs/
13
+ dist/
14
+ downloads/
15
+ eggs/
16
+ .eggs/
17
+ lib/
18
+ lib64/
19
+ parts/
20
+ sdist/
21
+ var/
22
+ wheels/
23
+ share/python-wheels/
24
+ *.egg-info/
25
+ .installed.cfg
26
+ *.egg
27
+ MANIFEST
28
+
29
+ # PyInstaller
30
+ # Usually these files are written by a python script from a template
31
+ # before PyInstaller builds the exe, so as to inject date/other infos into it.
32
+ *.manifest
33
+ *.spec
34
+
35
+ # Installer logs
36
+ pip-log.txt
37
+ pip-delete-this-directory.txt
38
+
39
+ # Unit test / coverage reports
40
+ htmlcov/
41
+ .tox/
42
+ .nox/
43
+ .coverage
44
+ .coverage.*
45
+ .cache
46
+ nosetests.xml
47
+ coverage.xml
48
+ *.cover
49
+ *.py.cover
50
+ .hypothesis/
51
+ .pytest_cache/
52
+ cover/
53
+
54
+ # Translations
55
+ *.mo
56
+ *.pot
57
+
58
+ # Django stuff:
59
+ *.log
60
+ local_settings.py
61
+ db.sqlite3
62
+ db.sqlite3-journal
63
+
64
+ # Flask stuff:
65
+ instance/
66
+ .webassets-cache
67
+
68
+ # Scrapy stuff:
69
+ .scrapy
70
+
71
+ # Sphinx documentation
72
+ docs/_build/
73
+
74
+ # PyBuilder
75
+ .pybuilder/
76
+ target/
77
+
78
+ # Jupyter Notebook
79
+ .ipynb_checkpoints
80
+
81
+ # IPython
82
+ profile_default/
83
+ ipython_config.py
84
+
85
+ # pyenv
86
+ # For a library or package, you might want to ignore these files since the code is
87
+ # intended to run in multiple environments; otherwise, check them in:
88
+ # .python-version
89
+
90
+ # pipenv
91
+ # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
92
+ # However, in case of collaboration, if having platform-specific dependencies or dependencies
93
+ # having no cross-platform support, pipenv may install dependencies that don't work, or not
94
+ # install all needed dependencies.
95
+ # Pipfile.lock
96
+
97
+ # UV
98
+ # Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control.
99
+ # This is especially recommended for binary packages to ensure reproducibility, and is more
100
+ # commonly ignored for libraries.
101
+ # uv.lock
102
+
103
+ # poetry
104
+ # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
105
+ # This is especially recommended for binary packages to ensure reproducibility, and is more
106
+ # commonly ignored for libraries.
107
+ # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
108
+ # poetry.lock
109
+ # poetry.toml
110
+
111
+ # pdm
112
+ # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
113
+ # pdm recommends including project-wide configuration in pdm.toml, but excluding .pdm-python.
114
+ # https://pdm-project.org/en/latest/usage/project/#working-with-version-control
115
+ # pdm.lock
116
+ # pdm.toml
117
+ .pdm-python
118
+ .pdm-build/
119
+
120
+ # pixi
121
+ # Similar to Pipfile.lock, it is generally recommended to include pixi.lock in version control.
122
+ # pixi.lock
123
+ # Pixi creates a virtual environment in the .pixi directory, just like venv module creates one
124
+ # in the .venv directory. It is recommended not to include this directory in version control.
125
+ .pixi
126
+
127
+ # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
128
+ __pypackages__/
129
+
130
+ # Celery stuff
131
+ celerybeat-schedule
132
+ celerybeat.pid
133
+
134
+ # Redis
135
+ *.rdb
136
+ *.aof
137
+ *.pid
138
+
139
+ # RabbitMQ
140
+ mnesia/
141
+ rabbitmq/
142
+ rabbitmq-data/
143
+
144
+ # ActiveMQ
145
+ activemq-data/
146
+
147
+ # SageMath parsed files
148
+ *.sage.py
149
+
150
+ # Environments
151
+ .env
152
+ .envrc
153
+ .venv
154
+ env/
155
+ venv/
156
+ ENV/
157
+ env.bak/
158
+ venv.bak/
159
+
160
+ # Spyder project settings
161
+ .spyderproject
162
+ .spyproject
163
+
164
+ # Rope project settings
165
+ .ropeproject
166
+
167
+ # mkdocs documentation
168
+ /site
169
+
170
+ # mypy
171
+ .mypy_cache/
172
+ .dmypy.json
173
+ dmypy.json
174
+
175
+ # Pyre type checker
176
+ .pyre/
177
+
178
+ # pytype static type analyzer
179
+ .pytype/
180
+
181
+ # Cython debug symbols
182
+ cython_debug/
183
+
184
+ # PyCharm
185
+ # JetBrains specific template is maintained in a separate JetBrains.gitignore that can
186
+ # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
187
+ # and can be added to the global gitignore or merged into this file. For a more nuclear
188
+ # option (not recommended) you can uncomment the following to ignore the entire idea folder.
189
+ # .idea/
190
+
191
+ # Abstra
192
+ # Abstra is an AI-powered process automation framework.
193
+ # Ignore directories containing user credentials, local state, and settings.
194
+ # Learn more at https://abstra.io/docs
195
+ .abstra/
196
+
197
+ # Visual Studio Code
198
+ # Visual Studio Code specific template is maintained in a separate VisualStudioCode.gitignore
199
+ # that can be found at https://github.com/github/gitignore/blob/main/Global/VisualStudioCode.gitignore
200
+ # and can be added to the global gitignore or merged into this file. However, if you prefer,
201
+ # you could uncomment the following to ignore the entire vscode folder
202
+ # .vscode/
203
+ # Temporary file for partial code execution
204
+ tempCodeRunnerFile.py
205
+
206
+ # Ruff stuff:
207
+ .ruff_cache/
208
+
209
+ # PyPI configuration file
210
+ .pypirc
211
+
212
+ # Marimo
213
+ marimo/_static/
214
+ marimo/_lsp/
215
+ __marimo__/
216
+
217
+ # Streamlit
218
+ .streamlit/secrets.toml
@@ -0,0 +1,21 @@
1
+ The MIT License (MIT)
2
+
3
+ Copyright (c) 2026-present, Xiaoyou Chen
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
@@ -0,0 +1,107 @@
1
+ Metadata-Version: 2.4
2
+ Name: vnpy_questdb
3
+ Version: 1.0.0
4
+ Summary: QuestDB database adapter for VeighNa quant trading framework.
5
+ Project-URL: Homepage, https://www.vnpy.com
6
+ Project-URL: Documentation, https://www.vnpy.com/docs
7
+ Project-URL: Changes, https://github.com/vnpy/vnpy_questdb/blob/master/CHANGELOG.md
8
+ Project-URL: Source, https://github.com/vnpy/vnpy_questdb/
9
+ Project-URL: Forum, https://www.vnpy.com/forum
10
+ Author-email: Xiaoyou Chen <xiaoyou.chen@mail.vnpy.com>
11
+ License: MIT
12
+ License-File: LICENSE
13
+ Keywords: algotrading,investment,quant,quantitative,trading
14
+ Classifier: Development Status :: 4 - Beta
15
+ Classifier: License :: OSI Approved :: MIT License
16
+ Classifier: Natural Language :: Chinese (Simplified)
17
+ Classifier: Operating System :: OS Independent
18
+ Classifier: Programming Language :: Python :: 3
19
+ Classifier: Programming Language :: Python :: 3.10
20
+ Classifier: Programming Language :: Python :: 3.11
21
+ Classifier: Programming Language :: Python :: 3.12
22
+ Classifier: Programming Language :: Python :: 3.13
23
+ Classifier: Programming Language :: Python :: Implementation :: CPython
24
+ Classifier: Topic :: Office/Business :: Financial :: Investment
25
+ Classifier: Typing :: Typed
26
+ Requires-Python: >=3.10
27
+ Requires-Dist: psycopg[binary]>=3.2
28
+ Requires-Dist: questdb>=4.0
29
+ Provides-Extra: test
30
+ Requires-Dist: numpy; extra == 'test'
31
+ Requires-Dist: pandas; extra == 'test'
32
+ Requires-Dist: pyarrow; extra == 'test'
33
+ Description-Content-Type: text/markdown
34
+
35
+ # VeighNa框架的QuestDB数据库接口
36
+
37
+ <p align="center">
38
+ <img src ="https://vnpy.oss-cn-shanghai.aliyuncs.com/vnpy-logo.png"/>
39
+ </p>
40
+
41
+ <p align="center">
42
+ <img src ="https://img.shields.io/badge/version-1.0.0-blueviolet.svg"/>
43
+ <img src ="https://img.shields.io/badge/platform-windows|linux|macos-yellow.svg"/>
44
+ <img src ="https://img.shields.io/badge/python-3.10|3.11|3.12|3.13-blue.svg" />
45
+ </p>
46
+
47
+ ## 说明
48
+
49
+ 对接 QuestDB 的高性能时序数据库接口实现。
50
+
51
+ 主要特性:
52
+
53
+ - 通过 QuestDB ILP/HTTP 高速写入K线和Tick数据。
54
+ - 通过 PGWire 执行建表、查询、汇总和逻辑删除。
55
+ - 使用 QuestDB WAL 表和 `DEDUP UPSERT KEYS` 支持重复数据覆盖。
56
+ - 使用 `deleted` 字段进行逻辑删除,查询和汇总时自动过滤已删除数据。
57
+
58
+ ## 安装
59
+
60
+ ```bash
61
+ pip install vnpy_questdb
62
+ ```
63
+
64
+ ## QuestDB 端口说明
65
+
66
+ 当前实现会同时使用 QuestDB 的两个服务端口:
67
+
68
+ - `database.port`:PGWire SQL端口,默认 `8812`。代码使用 `psycopg` 连接该端口,用于创建表、读取数据、查询汇总、执行逻辑删除,以及检查 WAL 事务是否已经应用。
69
+ - `database.http_port`:HTTP端口,默认 `9000`。代码使用 `questdb.ingress.Sender` 通过 ILP/HTTP 写入K线和Tick数据。
70
+
71
+ 这两个端口的职责不同,不能互相替代。`8812` 面向 PostgreSQL Wire Protocol 查询;`9000` 面向 QuestDB HTTP服务,其中包含 ILP/HTTP 写入入口和 Web Console。如果使用 Docker、远程服务器或防火墙,需要同时开放这两个端口。
72
+
73
+ 示例 Docker 端口映射:
74
+
75
+ ```bash
76
+ docker run --rm -p 8812:8812 -p 9000:9000 questdb/questdb
77
+ ```
78
+
79
+ 当前版本使用 ILP/HTTP 写入,不使用 QuestDB 的 ILP/TCP `9009` 端口。
80
+
81
+ ## VeighNa 配置
82
+
83
+ 在 VeighNa 配置文件中选择 QuestDB 数据库,并配置连接参数:
84
+
85
+ ```json
86
+ {
87
+ "database.name": "questdb",
88
+ "database.host": "localhost",
89
+ "database.port": 8812,
90
+ "database.user": "admin",
91
+ "database.password": "quest",
92
+ "database.database": "qdb",
93
+ "database.http_port": 9000
94
+ }
95
+ ```
96
+
97
+ 参数说明:
98
+
99
+ - `database.name`:数据库适配器名称,使用本插件时配置为 `questdb`。
100
+ - `database.host`:QuestDB 服务地址,PGWire 和 HTTP ILP 都会连接该地址。
101
+ - `database.port`:PGWire SQL端口,用于查询和管理操作。
102
+ - `database.user`:PGWire 用户名,QuestDB 默认值为 `admin`。
103
+ - `database.password`:PGWire 密码,QuestDB 默认值为 `quest`。
104
+ - `database.database`:PGWire 数据库名,QuestDB 默认值为 `qdb`。
105
+ - `database.http_port`:QuestDB HTTP端口,用于 ILP/HTTP 数据写入。
106
+
107
+ 如果 QuestDB 部署在远程主机,请确认 `database.host` 可以从运行 VeighNa 的机器访问,并确认 `database.port` 与 `database.http_port` 均已开放。
@@ -0,0 +1,73 @@
1
+ # VeighNa框架的QuestDB数据库接口
2
+
3
+ <p align="center">
4
+ <img src ="https://vnpy.oss-cn-shanghai.aliyuncs.com/vnpy-logo.png"/>
5
+ </p>
6
+
7
+ <p align="center">
8
+ <img src ="https://img.shields.io/badge/version-1.0.0-blueviolet.svg"/>
9
+ <img src ="https://img.shields.io/badge/platform-windows|linux|macos-yellow.svg"/>
10
+ <img src ="https://img.shields.io/badge/python-3.10|3.11|3.12|3.13-blue.svg" />
11
+ </p>
12
+
13
+ ## 说明
14
+
15
+ 对接 QuestDB 的高性能时序数据库接口实现。
16
+
17
+ 主要特性:
18
+
19
+ - 通过 QuestDB ILP/HTTP 高速写入K线和Tick数据。
20
+ - 通过 PGWire 执行建表、查询、汇总和逻辑删除。
21
+ - 使用 QuestDB WAL 表和 `DEDUP UPSERT KEYS` 支持重复数据覆盖。
22
+ - 使用 `deleted` 字段进行逻辑删除,查询和汇总时自动过滤已删除数据。
23
+
24
+ ## 安装
25
+
26
+ ```bash
27
+ pip install vnpy_questdb
28
+ ```
29
+
30
+ ## QuestDB 端口说明
31
+
32
+ 当前实现会同时使用 QuestDB 的两个服务端口:
33
+
34
+ - `database.port`:PGWire SQL端口,默认 `8812`。代码使用 `psycopg` 连接该端口,用于创建表、读取数据、查询汇总、执行逻辑删除,以及检查 WAL 事务是否已经应用。
35
+ - `database.http_port`:HTTP端口,默认 `9000`。代码使用 `questdb.ingress.Sender` 通过 ILP/HTTP 写入K线和Tick数据。
36
+
37
+ 这两个端口的职责不同,不能互相替代。`8812` 面向 PostgreSQL Wire Protocol 查询;`9000` 面向 QuestDB HTTP服务,其中包含 ILP/HTTP 写入入口和 Web Console。如果使用 Docker、远程服务器或防火墙,需要同时开放这两个端口。
38
+
39
+ 示例 Docker 端口映射:
40
+
41
+ ```bash
42
+ docker run --rm -p 8812:8812 -p 9000:9000 questdb/questdb
43
+ ```
44
+
45
+ 当前版本使用 ILP/HTTP 写入,不使用 QuestDB 的 ILP/TCP `9009` 端口。
46
+
47
+ ## VeighNa 配置
48
+
49
+ 在 VeighNa 配置文件中选择 QuestDB 数据库,并配置连接参数:
50
+
51
+ ```json
52
+ {
53
+ "database.name": "questdb",
54
+ "database.host": "localhost",
55
+ "database.port": 8812,
56
+ "database.user": "admin",
57
+ "database.password": "quest",
58
+ "database.database": "qdb",
59
+ "database.http_port": 9000
60
+ }
61
+ ```
62
+
63
+ 参数说明:
64
+
65
+ - `database.name`:数据库适配器名称,使用本插件时配置为 `questdb`。
66
+ - `database.host`:QuestDB 服务地址,PGWire 和 HTTP ILP 都会连接该地址。
67
+ - `database.port`:PGWire SQL端口,用于查询和管理操作。
68
+ - `database.user`:PGWire 用户名,QuestDB 默认值为 `admin`。
69
+ - `database.password`:PGWire 密码,QuestDB 默认值为 `quest`。
70
+ - `database.database`:PGWire 数据库名,QuestDB 默认值为 `qdb`。
71
+ - `database.http_port`:QuestDB HTTP端口,用于 ILP/HTTP 数据写入。
72
+
73
+ 如果 QuestDB 部署在远程主机,请确认 `database.host` 可以从运行 VeighNa 的机器访问,并确认 `database.port` 与 `database.http_port` 均已开放。
@@ -0,0 +1,85 @@
1
+ [project]
2
+ name = "vnpy_questdb"
3
+ dynamic = ["version"]
4
+ description = "QuestDB database adapter for VeighNa quant trading framework."
5
+ readme = "README.md"
6
+ license = {text = "MIT"}
7
+ authors = [{name = "Xiaoyou Chen", email = "xiaoyou.chen@mail.vnpy.com"}]
8
+ classifiers = [
9
+ "Development Status :: 4 - Beta",
10
+ "License :: OSI Approved :: MIT License",
11
+ "Operating System :: OS Independent",
12
+ "Programming Language :: Python :: 3",
13
+ "Programming Language :: Python :: 3.10",
14
+ "Programming Language :: Python :: 3.11",
15
+ "Programming Language :: Python :: 3.12",
16
+ "Programming Language :: Python :: 3.13",
17
+ "Topic :: Office/Business :: Financial :: Investment",
18
+ "Programming Language :: Python :: Implementation :: CPython",
19
+ "Natural Language :: Chinese (Simplified)",
20
+ "Typing :: Typed"
21
+ ]
22
+ requires-python = ">=3.10"
23
+ dependencies = [
24
+ "questdb>=4.0",
25
+ "psycopg[binary]>=3.2",
26
+ ]
27
+ keywords = ["quant", "quantitative", "investment", "trading", "algotrading"]
28
+
29
+ [project.optional-dependencies]
30
+ test = [
31
+ "numpy",
32
+ "pandas",
33
+ "pyarrow",
34
+ ]
35
+
36
+ [project.urls]
37
+ "Homepage" = "https://www.vnpy.com"
38
+ "Documentation" = "https://www.vnpy.com/docs"
39
+ "Changes" = "https://github.com/vnpy/vnpy_questdb/blob/master/CHANGELOG.md"
40
+ "Source" = "https://github.com/vnpy/vnpy_questdb/"
41
+ "Forum" = "https://www.vnpy.com/forum"
42
+
43
+ [build-system]
44
+ requires = ["hatchling>=1.27.0"]
45
+ build-backend = "hatchling.build"
46
+
47
+ [tool.hatch.version]
48
+ path = "vnpy_questdb/__init__.py"
49
+ pattern = "__version__ = ['\"](?P<version>[^'\"]+)['\"]"
50
+
51
+ [tool.hatch.build.targets.wheel]
52
+ packages = ["vnpy_questdb"]
53
+ include-package-data = true
54
+
55
+ [tool.hatch.build.targets.sdist]
56
+ include = ["vnpy_questdb*"]
57
+
58
+ [tool.ruff]
59
+ target-version = "py310"
60
+ output-format = "full"
61
+
62
+ [tool.ruff.lint]
63
+ select = [
64
+ "B", # flake8-bugbear
65
+ "E", # pycodestyle error
66
+ "F", # pyflakes
67
+ "UP", # pyupgrade
68
+ "W", # pycodestyle warning
69
+ ]
70
+ ignore = ["E501"]
71
+
72
+ [tool.mypy]
73
+ python_version = "3.10"
74
+ warn_return_any = true
75
+ warn_unused_configs = true
76
+ disallow_untyped_defs = true
77
+ disallow_incomplete_defs = true
78
+ check_untyped_defs = true
79
+ disallow_untyped_decorators = true
80
+ no_implicit_optional = true
81
+ strict_optional = true
82
+ warn_redundant_casts = true
83
+ warn_unused_ignores = true
84
+ warn_no_return = true
85
+ ignore_missing_imports = true
@@ -0,0 +1,7 @@
1
# Public package entry point: expose the QuestDB adapter under the generic
# name "Database" expected by VeighNa's database plugin loader.
from .questdb_database import QuestdbDatabase as Database


__all__ = ["Database"]


# Package version; also read by hatch via the [tool.hatch.version] pattern.
__version__ = "1.0.0"
@@ -0,0 +1,772 @@
1
+ from collections.abc import Iterator
2
+ from datetime import datetime, timezone
3
+ from time import monotonic, sleep
4
+ from typing import Any, TypeAlias
5
+
6
+ import psycopg
7
+ from psycopg.rows import DictRow, dict_row
8
+ from questdb.ingress import Sender
9
+
10
+ from vnpy.trader.constant import Exchange, Interval
11
+ from vnpy.trader.database import (
12
+ BaseDatabase,
13
+ BarOverview,
14
+ DB_TZ,
15
+ TickOverview,
16
+ convert_tz,
17
+ )
18
+ from vnpy.trader.object import BarData, TickData
19
+ from vnpy.trader.setting import SETTINGS
20
+
21
+
22
# Target table names for bar and tick storage in QuestDB.
BAR_TABLE: str = "dbbardata"
TICK_TABLE: str = "dbtickdata"
# Rows fetched per batch when streaming PGWire query results.
FETCH_SIZE: int = 10_000
# Maximum seconds to wait for WAL transactions to be applied after a write;
# a value <= 0 disables the wait entirely (see _wait_wal_apply).
WAL_APPLY_TIMEOUT: float = 30

# Scalar value types accepted as SQL parameters / ILP column values.
SqlValue: TypeAlias = str | int | float | bool | datetime | None
SqlParams: TypeAlias = tuple[SqlValue, ...]
IlpColumns: TypeAlias = dict[str, SqlValue]
RowTuple: TypeAlias = tuple[Any, ...]

# WAL table partitioned by month; DEDUP UPSERT KEYS makes repeated writes
# of the same (datetime, symbol, exchange, interval) overwrite in place.
CREATE_BAR_TABLE_SQL: str = f"""
CREATE TABLE IF NOT EXISTS {BAR_TABLE} (
    symbol SYMBOL CAPACITY 256 CACHE,
    exchange SYMBOL CAPACITY 32 CACHE,
    interval SYMBOL CAPACITY 16 CACHE,
    datetime TIMESTAMP,
    volume DOUBLE,
    turnover DOUBLE,
    open_interest DOUBLE,
    open_price DOUBLE,
    high_price DOUBLE,
    low_price DOUBLE,
    close_price DOUBLE,
    deleted BOOLEAN
) TIMESTAMP(datetime)
PARTITION BY MONTH
WAL
DEDUP UPSERT KEYS(datetime, symbol, exchange, interval);
"""

# WAL table partitioned by day; dedup key is (datetime, symbol, exchange).
CREATE_TICK_TABLE_SQL: str = f"""
CREATE TABLE IF NOT EXISTS {TICK_TABLE} (
    symbol SYMBOL CAPACITY 256 CACHE,
    exchange SYMBOL CAPACITY 32 CACHE,
    datetime TIMESTAMP,
    name STRING,
    volume DOUBLE,
    turnover DOUBLE,
    open_interest DOUBLE,
    last_price DOUBLE,
    last_volume DOUBLE,
    limit_up DOUBLE,
    limit_down DOUBLE,
    open_price DOUBLE,
    high_price DOUBLE,
    low_price DOUBLE,
    pre_close DOUBLE,
    bid_price_1 DOUBLE,
    bid_price_2 DOUBLE,
    bid_price_3 DOUBLE,
    bid_price_4 DOUBLE,
    bid_price_5 DOUBLE,
    ask_price_1 DOUBLE,
    ask_price_2 DOUBLE,
    ask_price_3 DOUBLE,
    ask_price_4 DOUBLE,
    ask_price_5 DOUBLE,
    bid_volume_1 DOUBLE,
    bid_volume_2 DOUBLE,
    bid_volume_3 DOUBLE,
    bid_volume_4 DOUBLE,
    bid_volume_5 DOUBLE,
    ask_volume_1 DOUBLE,
    ask_volume_2 DOUBLE,
    ask_volume_3 DOUBLE,
    ask_volume_4 DOUBLE,
    ask_volume_5 DOUBLE,
    localtime TIMESTAMP,
    deleted BOOLEAN
) TIMESTAMP(datetime)
PARTITION BY DAY
WAL
DEDUP UPSERT KEYS(datetime, symbol, exchange);
"""

# Bar query: column order must match the positional indexing in
# QuestdbDatabase.load_bar_data (row[0] .. row[7]).
LOAD_BAR_DATA_SQL: str = f"""
SELECT
    datetime,
    volume,
    turnover,
    open_interest,
    open_price,
    high_price,
    low_price,
    close_price
FROM {BAR_TABLE}
WHERE symbol = %s
  AND exchange = %s
  AND interval = %s
  AND datetime >= %s
  AND datetime <= %s
  AND deleted = false
ORDER BY datetime;
"""

# Tick query: column order must match the positional indexing in
# QuestdbDatabase.load_tick_data (row[0] .. row[33], localtime last).
LOAD_TICK_DATA_SQL: str = f"""
SELECT
    datetime,
    name,
    volume,
    turnover,
    open_interest,
    last_price,
    last_volume,
    limit_up,
    limit_down,
    open_price,
    high_price,
    low_price,
    pre_close,
    bid_price_1,
    bid_price_2,
    bid_price_3,
    bid_price_4,
    bid_price_5,
    ask_price_1,
    ask_price_2,
    ask_price_3,
    ask_price_4,
    ask_price_5,
    bid_volume_1,
    bid_volume_2,
    bid_volume_3,
    bid_volume_4,
    bid_volume_5,
    ask_volume_1,
    ask_volume_2,
    ask_volume_3,
    ask_volume_4,
    ask_volume_5,
    localtime
FROM {TICK_TABLE}
WHERE symbol = %s
  AND exchange = %s
  AND datetime >= %s
  AND datetime <= %s
  AND deleted = false
ORDER BY datetime;
"""

# Count of live (not soft-deleted) bars for one symbol/exchange/interval.
COUNT_BAR_DATA_SQL: str = f"""
SELECT count() AS count
FROM {BAR_TABLE}
WHERE symbol = %s
  AND exchange = %s
  AND interval = %s
  AND deleted = false;
"""

# Soft delete: flip the deleted flag instead of removing WAL-table rows.
SOFT_DELETE_BAR_DATA_SQL: str = f"""
UPDATE {BAR_TABLE}
SET deleted = true
WHERE symbol = %s
  AND exchange = %s
  AND interval = %s
  AND deleted = false;
"""

# Count of live (not soft-deleted) ticks for one symbol/exchange.
COUNT_TICK_DATA_SQL: str = f"""
SELECT count() AS count
FROM {TICK_TABLE}
WHERE symbol = %s
  AND exchange = %s
  AND deleted = false;
"""

# Soft delete for ticks, mirroring SOFT_DELETE_BAR_DATA_SQL.
SOFT_DELETE_TICK_DATA_SQL: str = f"""
UPDATE {TICK_TABLE}
SET deleted = true
WHERE symbol = %s
  AND exchange = %s
  AND deleted = false;
"""

# Per-(symbol, exchange, interval) bar summary, excluding soft-deleted rows.
GET_BAR_OVERVIEW_SQL: str = f"""
SELECT
    symbol,
    exchange,
    interval,
    count() AS count,
    min(datetime) AS start_datetime,
    max(datetime) AS end_datetime
FROM {BAR_TABLE}
WHERE deleted = false
GROUP BY symbol, exchange, interval
ORDER BY symbol, exchange, interval;
"""

# Per-(symbol, exchange) tick summary, excluding soft-deleted rows.
GET_TICK_OVERVIEW_SQL: str = f"""
SELECT
    symbol,
    exchange,
    count() AS count,
    min(datetime) AS start_datetime,
    max(datetime) AS end_datetime
FROM {TICK_TABLE}
WHERE deleted = false
GROUP BY symbol, exchange
ORDER BY symbol, exchange;
"""

# QuestDB wal_tables() status probe, used to detect suspension and to
# compare writerTxn against sequencerTxn (applied vs. committed WAL txns).
WAL_TABLE_STATUS_SQL: str = """
SELECT
    suspended,
    writerTxn,
    sequencerTxn,
    errorMessage
FROM wal_tables()
WHERE name = %s;
"""
+
234
class QuestdbDatabase(BaseDatabase):
    """
    QuestDB database adapter.

    Bar and tick data are written over ILP/HTTP via ``questdb.ingress.Sender``;
    table creation, queries, overviews and soft deletes run over the PGWire
    SQL port via ``psycopg``.
    """

    def __init__(self) -> None:
        """
        Initialize the QuestDB database adapter.

        Reads the VeighNa database settings, builds the PGWire connection
        info and the ILP write configuration, and ensures the bar and tick
        tables have been created.
        """
        self.host: str = str(SETTINGS.get("database.host", "localhost"))
        self.port: int = int(SETTINGS.get("database.port", 8812))
        self.user: str = str(SETTINGS.get("database.user", "admin"))
        self.password: str = str(SETTINGS.get("database.password", "quest"))
        self.database: str = str(SETTINGS.get("database.database", "qdb"))
        self.http_port: int = int(SETTINGS.get("database.http_port", 9000))

        # PGWire (PostgreSQL wire protocol) connection string for psycopg.
        self.conninfo: str = (
            f"host={self.host} "
            f"port={self.port} "
            f"user={self.user} "
            f"password={self.password} "
            f"dbname={self.database}"
        )
        self.ilp_conf: str = self._create_ilp_conf()

        self.init_tables()

    def _create_ilp_conf(self) -> str:
        """
        Create the QuestDB ILP client configuration.

        Returns:
            HTTP ILP connection configuration string for the QuestDB Sender.
        """
        # ILP writes use the HTTP port; PGWire queries use the separate SQL port.
        return f"http::addr={self.host}:{self.http_port};"

    def init_tables(self) -> None:
        """
        Initialize the database tables (idempotent CREATE TABLE IF NOT EXISTS).
        """
        with psycopg.connect(self.conninfo, autocommit=True) as conn:
            with conn.cursor() as cursor:
                cursor.execute(CREATE_BAR_TABLE_SQL)
                cursor.execute(CREATE_TICK_TABLE_SQL)

    def save_bar_data(self, bars: list[BarData], stream: bool = False) -> bool:
        """
        Save bar data.

        Args:
            bars: Bar data list to write.
            stream: VeighNa database interface compatibility parameter; the
                QuestDB write logic does not distinguish it.

        Returns:
            True when the write succeeds.

        Raises:
            ValueError: If a bar's interval is empty.
        """
        if not bars:
            return True

        with Sender.from_conf(self.ilp_conf) as sender:
            for bar in bars:
                interval: Interval | None = bar.interval
                if interval is None:
                    # interval is part of the QuestDB dedup upsert key,
                    # so it must be explicit before writing.
                    raise ValueError("BarData.interval不能为空")

                sender.row(
                    BAR_TABLE,
                    symbols={
                        "symbol": bar.symbol,
                        "exchange": bar.exchange.value,
                        "interval": interval.value,
                    },
                    columns={
                        "volume": bar.volume,
                        "turnover": bar.turnover,
                        "open_interest": bar.open_interest,
                        "open_price": bar.open_price,
                        "high_price": bar.high_price,
                        "low_price": bar.low_price,
                        "close_price": bar.close_price,
                        "deleted": False,
                    },
                    at=self._to_questdb_datetime(bar.datetime),
                )
            sender.flush()

        # Block until the WAL transactions are applied so that an immediate
        # follow-up PGWire query sees the freshly written rows.
        self._wait_wal_apply(BAR_TABLE)

        return True

    def save_tick_data(self, ticks: list[TickData], stream: bool = False) -> bool:
        """
        Save tick data.

        Args:
            ticks: Tick data list to write.
            stream: VeighNa database interface compatibility parameter; the
                QuestDB write logic does not distinguish it.

        Returns:
            True when the write succeeds.
        """
        if not ticks:
            return True

        with Sender.from_conf(self.ilp_conf) as sender:
            for tick in ticks:
                columns: IlpColumns = {
                    "name": tick.name,
                    "volume": tick.volume,
                    "turnover": tick.turnover,
                    "open_interest": tick.open_interest,
                    "last_price": tick.last_price,
                    "last_volume": tick.last_volume,
                    "limit_up": tick.limit_up,
                    "limit_down": tick.limit_down,
                    "open_price": tick.open_price,
                    "high_price": tick.high_price,
                    "low_price": tick.low_price,
                    "pre_close": tick.pre_close,
                    "bid_price_1": tick.bid_price_1,
                    "bid_price_2": tick.bid_price_2,
                    "bid_price_3": tick.bid_price_3,
                    "bid_price_4": tick.bid_price_4,
                    "bid_price_5": tick.bid_price_5,
                    "ask_price_1": tick.ask_price_1,
                    "ask_price_2": tick.ask_price_2,
                    "ask_price_3": tick.ask_price_3,
                    "ask_price_4": tick.ask_price_4,
                    "ask_price_5": tick.ask_price_5,
                    "bid_volume_1": tick.bid_volume_1,
                    "bid_volume_2": tick.bid_volume_2,
                    "bid_volume_3": tick.bid_volume_3,
                    "bid_volume_4": tick.bid_volume_4,
                    "bid_volume_5": tick.bid_volume_5,
                    "ask_volume_1": tick.ask_volume_1,
                    "ask_volume_2": tick.ask_volume_2,
                    "ask_volume_3": tick.ask_volume_3,
                    "ask_volume_4": tick.ask_volume_4,
                    "ask_volume_5": tick.ask_volume_5,
                    "deleted": False,
                }

                # localtime is optional; only send the column when present.
                if tick.localtime:
                    columns["localtime"] = self._to_questdb_datetime(tick.localtime)

                sender.row(
                    TICK_TABLE,
                    symbols={
                        "symbol": tick.symbol,
                        "exchange": tick.exchange.value,
                    },
                    columns=columns,
                    at=self._to_questdb_datetime(tick.datetime),
                )
            sender.flush()

        self._wait_wal_apply(TICK_TABLE)

        return True

    def load_bar_data(
        self,
        symbol: str,
        exchange: Exchange,
        interval: Interval,
        start: datetime,
        end: datetime
    ) -> list[BarData]:
        """
        Load bar data.

        Args:
            symbol: Contract symbol.
            exchange: Exchange.
            interval: Bar interval.
            start: Query start time (inclusive).
            end: Query end time (inclusive).

        Returns:
            Bar data list sorted by datetime ascending.
        """
        params: SqlParams = (
            symbol,
            exchange.value,
            interval.value,
            self._to_pg_datetime(start),
            self._to_pg_datetime(end),
        )

        bars: list[BarData] = []
        # Hoist attribute lookups out of the (potentially very long) row loop.
        append = bars.append
        from_datetime = self._from_questdb_datetime
        for row in self._iter_tuples(LOAD_BAR_DATA_SQL, params):
            bar: BarData = BarData(
                symbol=symbol,
                exchange=exchange,
                datetime=from_datetime(row[0]),
                interval=interval,
                volume=row[1],
                turnover=row[2],
                open_interest=row[3],
                open_price=row[4],
                high_price=row[5],
                low_price=row[6],
                close_price=row[7],
                gateway_name="DB",
            )
            append(bar)

        return bars

    def load_tick_data(
        self,
        symbol: str,
        exchange: Exchange,
        start: datetime,
        end: datetime
    ) -> list[TickData]:
        """
        Load tick data.

        Args:
            symbol: Contract symbol.
            exchange: Exchange.
            start: Query start time (inclusive).
            end: Query end time (inclusive).

        Returns:
            Tick data list sorted by datetime ascending.
        """
        params: SqlParams = (
            symbol,
            exchange.value,
            self._to_pg_datetime(start),
            self._to_pg_datetime(end),
        )

        ticks: list[TickData] = []
        append = ticks.append
        from_datetime = self._from_questdb_datetime
        for row in self._iter_tuples(LOAD_TICK_DATA_SQL, params):
            # row[33] is the optional localtime column (last in the SELECT).
            localtime: datetime | None = None
            if row[33]:
                localtime = from_datetime(row[33])

            tick: TickData = TickData(
                symbol=symbol,
                exchange=exchange,
                datetime=from_datetime(row[0]),
                name=row[1],
                volume=row[2],
                turnover=row[3],
                open_interest=row[4],
                last_price=row[5],
                last_volume=row[6],
                limit_up=row[7],
                limit_down=row[8],
                open_price=row[9],
                high_price=row[10],
                low_price=row[11],
                pre_close=row[12],
                bid_price_1=row[13],
                bid_price_2=row[14],
                bid_price_3=row[15],
                bid_price_4=row[16],
                bid_price_5=row[17],
                ask_price_1=row[18],
                ask_price_2=row[19],
                ask_price_3=row[20],
                ask_price_4=row[21],
                ask_price_5=row[22],
                bid_volume_1=row[23],
                bid_volume_2=row[24],
                bid_volume_3=row[25],
                bid_volume_4=row[26],
                bid_volume_5=row[27],
                ask_volume_1=row[28],
                ask_volume_2=row[29],
                ask_volume_3=row[30],
                ask_volume_4=row[31],
                ask_volume_5=row[32],
                localtime=localtime,
                gateway_name="DB",
            )
            append(tick)

        return ticks

    def delete_bar_data(
        self,
        symbol: str,
        exchange: Exchange,
        interval: Interval
    ) -> int:
        """
        Soft-delete bar data.

        Args:
            symbol: Contract symbol.
            exchange: Exchange.
            interval: Bar interval.

        Returns:
            Number of bars marked as deleted.
        """
        params: SqlParams = (symbol, exchange.value, interval.value)

        # Soft delete via the deleted flag, avoiding physical removal of
        # historical records from the QuestDB WAL table.
        count: int = self._query_count(COUNT_BAR_DATA_SQL, params)
        self._execute(SOFT_DELETE_BAR_DATA_SQL, params)
        self._wait_wal_apply(BAR_TABLE)

        return count

    def delete_tick_data(
        self,
        symbol: str,
        exchange: Exchange
    ) -> int:
        """
        Soft-delete tick data.

        Args:
            symbol: Contract symbol.
            exchange: Exchange.

        Returns:
            Number of ticks marked as deleted.
        """
        params: SqlParams = (symbol, exchange.value)

        # Soft delete via the deleted flag, avoiding physical removal of
        # historical records from the QuestDB WAL table.
        count: int = self._query_count(COUNT_TICK_DATA_SQL, params)
        self._execute(SOFT_DELETE_TICK_DATA_SQL, params)
        self._wait_wal_apply(TICK_TABLE)

        return count

    def get_bar_overview(self) -> list[BarOverview]:
        """
        Query bar data overview from the database.

        Returns:
            List of bar overview records.
        """
        overviews: list[BarOverview] = []
        for row in self._iter_rows(GET_BAR_OVERVIEW_SQL):
            overview: BarOverview = BarOverview(
                symbol=row["symbol"],
                exchange=Exchange(row["exchange"]),
                interval=Interval(row["interval"]),
                count=int(row["count"]),
                start=self._from_questdb_datetime(row["start_datetime"]),
                end=self._from_questdb_datetime(row["end_datetime"]),
            )
            overviews.append(overview)

        return overviews

    def get_tick_overview(self) -> list[TickOverview]:
        """
        Query tick data overview from the database.

        Returns:
            List of tick overview records.
        """
        overviews: list[TickOverview] = []
        for row in self._iter_rows(GET_TICK_OVERVIEW_SQL):
            overview: TickOverview = TickOverview(
                symbol=row["symbol"],
                exchange=Exchange(row["exchange"]),
                count=int(row["count"]),
                start=self._from_questdb_datetime(row["start_datetime"]),
                end=self._from_questdb_datetime(row["end_datetime"]),
            )
            overviews.append(overview)

        return overviews

    def _iter_rows(
        self,
        sql: str,
        params: SqlParams | None = None
    ) -> Iterator[DictRow]:
        """
        Iterate query results as dict rows, fetched in batches.

        Args:
            sql: SQL statement to execute.
            params: SQL query parameters.

        Yields:
            Query result rows as dicts.
        """
        with psycopg.connect(self.conninfo, row_factory=dict_row) as conn:
            with conn.cursor() as cursor:
                cursor.execute(sql, params)
                # fetchmany caps memory use for large result sets.
                while batch := cursor.fetchmany(FETCH_SIZE):
                    yield from batch

    def _iter_tuples(
        self,
        sql: str,
        params: SqlParams | None = None
    ) -> Iterator[RowTuple]:
        """
        Iterate query results as tuple rows, fetched in batches.

        Args:
            sql: SQL statement to execute.
            params: SQL query parameters.

        Yields:
            Query result rows as tuples.
        """
        with psycopg.connect(self.conninfo) as conn:
            with conn.cursor() as cursor:
                cursor.execute(sql, params)
                while batch := cursor.fetchmany(FETCH_SIZE):
                    yield from batch

    def _query_count(self, sql: str, params: SqlParams) -> int:
        """
        Run a query returning a single count value.

        Args:
            sql: SQL statement returning a "count" column.
            params: SQL query parameters.

        Returns:
            The queried count value (0 when no row is returned).
        """
        with psycopg.connect(self.conninfo, row_factory=dict_row) as conn:
            with conn.cursor() as cursor:
                cursor.execute(sql, params)
                row: DictRow | None = cursor.fetchone()
                if not row:
                    return 0
                return int(row["count"])

    def _execute(self, sql: str, params: SqlParams) -> None:
        """
        Execute a SQL statement with autocommit.

        Args:
            sql: SQL statement to execute.
            params: SQL parameters.
        """
        with psycopg.connect(self.conninfo, autocommit=True) as conn:
            with conn.cursor() as cursor:
                cursor.execute(sql, params)

    def _wait_wal_apply(self, table_name: str) -> None:
        """
        Wait until pending WAL transactions are applied to queryable table data.

        Args:
            table_name: QuestDB WAL table name.

        Raises:
            RuntimeError: If the WAL table is suspended.
            TimeoutError: If waiting for WAL application times out.
        """
        if WAL_APPLY_TIMEOUT <= 0:
            return

        deadline: float = monotonic() + WAL_APPLY_TIMEOUT

        # Hold one PGWire connection and cursor for the whole polling loop
        # instead of reconnecting on every 50 ms iteration: a fresh TCP
        # connect + auth per poll adds avoidable latency and server load.
        with psycopg.connect(self.conninfo, row_factory=dict_row) as conn:
            with conn.cursor() as cursor:
                while True:
                    cursor.execute(WAL_TABLE_STATUS_SQL, (table_name,))
                    row: DictRow | None = cursor.fetchone()

                    # Table not listed by wal_tables(): nothing to wait for.
                    if not row:
                        return

                    if row["suspended"]:
                        raise RuntimeError(f"QuestDB WAL表{table_name}已暂停: {row['errorMessage']}")

                    # WAL writes require the writer to catch up with the
                    # sequencer before PGWire queries reliably see new data.
                    if row["writerTxn"] == row["sequencerTxn"]:
                        return

                    if monotonic() >= deadline:
                        raise TimeoutError(f"等待QuestDB WAL表{table_name}应用超时")

                    sleep(0.05)

    @staticmethod
    def _to_questdb_datetime(dt: datetime) -> datetime:
        """
        Convert to the UTC time used for QuestDB ILP writes.

        Args:
            dt: Time to convert.

        Returns:
            Timezone-aware UTC time.
        """
        # convert_tz normalizes to the VeighNa database timezone; attach DB_TZ
        # explicitly and then shift to UTC for ILP ingestion.
        db_dt: datetime = convert_tz(dt).replace(tzinfo=DB_TZ)
        return db_dt.astimezone(timezone.utc)

    @classmethod
    def _to_pg_datetime(cls, dt: datetime) -> datetime:
        """
        Convert to the naive UTC time used for PGWire query parameters.

        Args:
            dt: Time to convert.

        Returns:
            UTC time without tzinfo.
        """
        return cls._to_questdb_datetime(dt).replace(tzinfo=None)

    @staticmethod
    def _from_questdb_datetime(dt: datetime) -> datetime:
        """
        Convert a time returned by QuestDB to the VeighNa database timezone.

        Args:
            dt: Time returned by QuestDB; may be timezone-aware or a naive
                UTC time.

        Returns:
            Time converted into the VeighNa database timezone.
        """
        if dt.tzinfo:
            return dt.astimezone(DB_TZ)

        # Naive values from QuestDB are UTC; attach UTC before converting.
        return dt.replace(tzinfo=timezone.utc).astimezone(DB_TZ)