tradepose-client 0.1.0 (tradepose_client-0.1.0-py3-none-any.whl)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of tradepose-client might be problematic.
- tradepose_client/__init__.py +156 -0
- tradepose_client/analysis.py +302 -0
- tradepose_client/api/__init__.py +8 -0
- tradepose_client/api/engine.py +59 -0
- tradepose_client/api/export.py +828 -0
- tradepose_client/api/health.py +70 -0
- tradepose_client/api/strategy.py +228 -0
- tradepose_client/client.py +58 -0
- tradepose_client/models.py +1836 -0
- tradepose_client/schema.py +186 -0
- tradepose_client/viz.py +762 -0
- tradepose_client-0.1.0.dist-info/METADATA +576 -0
- tradepose_client-0.1.0.dist-info/RECORD +15 -0
- tradepose_client-0.1.0.dist-info/WHEEL +4 -0
- tradepose_client-0.1.0.dist-info/licenses/LICENSE +21 -0
tradepose_client/__init__.py
@@ -0,0 +1,156 @@
"""Tradepose Python Client

An easy-to-use client for the Tradepose API, providing strategy management, data export, and analysis utilities.

Example:
    >>> from tradepose_client import TradeposeClient
    >>> client = TradeposeClient()
    >>>
    >>> # List strategies
    >>> strategies = client.list_strategies()
    >>>
    >>> # Quick data export
    >>> df = client.quick_export("my_strategy")
"""

from .client import TradeposeClient
from .models import (
    StrategyConfig,
    Blueprint,
    Trigger,
    IndicatorSpec,
    Freq,
    OrderStrategy,
    Indicator,
    create_trigger,
    create_blueprint,
    create_indicator_spec,
    parse_strategy,
)
from .schema import (
    enhanced_ohlcv_schema,
    trades_schema,
    performance_schema,
)
from .analysis import (
    calculate_returns,
    calculate_volatility,
    calculate_drawdown,
    get_max_drawdown,
    filter_by_date_range,
    split_train_test,
    find_indicator_columns,
    resample_to_daily,
    print_summary,
)
from .viz import (
    plot_mae_mfe_scatter,
    plot_mfe_mhl_analysis,
    plot_pnl_curves,
    plot_trade_histograms,
    combine_charts,
    add_win_loss_label,
    calculate_mea,
    calculate_mae_atr_ratio,  # Legacy alias
    calculate_cumulative_pnl,
    get_quantiles,
)

__version__ = "0.1.0"

__all__ = [
    # Client
    "TradeposeClient",
    "get_client",
    "quick_export",
    # Models
    "StrategyConfig",
    "Blueprint",
    "Trigger",
    "IndicatorSpec",
    "Freq",
    "OrderStrategy",
    "Indicator",
    "create_trigger",
    "create_blueprint",
    "create_indicator_spec",
    "parse_strategy",
    # Schema
    "enhanced_ohlcv_schema",
    "trades_schema",
    "performance_schema",
    # Analysis
    "calculate_returns",
    "calculate_volatility",
    "calculate_drawdown",
    "get_max_drawdown",
    "filter_by_date_range",
    "split_train_test",
    "find_indicator_columns",
    "resample_to_daily",
    "print_summary",
    # Visualization
    "plot_mae_mfe_scatter",
    "plot_mfe_mhl_analysis",
    "plot_pnl_curves",
    "plot_trade_histograms",
    "combine_charts",
    "add_win_loss_label",
    "calculate_mea",
    "calculate_mae_atr_ratio",  # Legacy alias
    "calculate_cumulative_pnl",
    "get_quantiles",
]


def get_client(
    api_url: str = "http://localhost:8080",
    redis_url: str = "redis://:tradepose_password@localhost:6379",
) -> TradeposeClient:
    """Get a Tradepose client instance.

    Args:
        api_url: API URL
        redis_url: Redis URL

    Returns:
        A TradeposeClient instance

    Example:
        >>> client = get_client()
        >>> strategies = client.list_strategies()
    """
    return TradeposeClient(api_url=api_url, redis_url=redis_url)


def quick_export(
    strategy_name: str,
    blueprint_name: str = None,
    start_date: str = "2020-01-01T00:00:00",
    end_date: str = None,
    save_path: str = None,
):
    """Quick data export (the simplest way to use the client).

    Args:
        strategy_name: Strategy name
        blueprint_name: Blueprint name
        start_date: Start date
        end_date: End date
        save_path: Path to save the exported data

    Returns:
        Polars DataFrame

    Example:
        >>> from tradepose_client import quick_export
        >>> df = quick_export("txf_1h_sma30_50", start_date="2020-01-01T00:00:00")
    """
    client = get_client()
    return client.quick_export(
        strategy_name=strategy_name,
        blueprint_name=blueprint_name,
        start_date=start_date,
        end_date=end_date,
        save_path=save_path,
    )
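For orientation, a minimal usage sketch of the package-level entry points defined in __init__.py above. It assumes a Tradepose API server is reachable at the default localhost URLs baked into get_client(); the strategy name is the one from the docstring example and otherwise arbitrary.

from tradepose_client import TradeposeClient, quick_export

# Explicit client construction, with the same defaults that get_client() uses.
client = TradeposeClient(
    api_url="http://localhost:8080",
    redis_url="redis://:tradepose_password@localhost:6379",
)
strategies = client.list_strategies()

# Or the one-line convenience path: build a default client and export in one call.
df = quick_export("txf_1h_sma30_50", start_date="2020-01-01T00:00:00")
print(df.head())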
tradepose_client/analysis.py
@@ -0,0 +1,302 @@
"""
Data-analysis helper functions

Provides common technical-analysis and data-processing functions for convenient use in Jupyter notebooks.
"""

import polars as pl
from typing import List, Optional, Tuple


def calculate_returns(
    df: pl.DataFrame,
    price_col: str = "close"
) -> pl.DataFrame:
    """Calculate returns.

    Args:
        df: Input DataFrame
        price_col: Price column name

    Returns:
        DataFrame with added returns and log_returns columns
    """
    return df.with_columns([
        ((pl.col(price_col) / pl.col(price_col).shift(1)) - 1).alias("returns"),
        (pl.col(price_col) / pl.col(price_col).shift(1)).log().alias("log_returns")
    ])


def calculate_volatility(
    df: pl.DataFrame,
    returns_col: str = "returns",
    window: int = 20
) -> pl.DataFrame:
    """Calculate rolling volatility.

    Args:
        df: Input DataFrame
        returns_col: Returns column name
        window: Rolling window size

    Returns:
        DataFrame with an added volatility column
    """
    return df.with_columns([
        pl.col(returns_col).rolling_std(window).alias(f"volatility_{window}d")
    ])


def resample_to_daily(
    df: pl.DataFrame,
    timestamp_col: str = "ts"
) -> pl.DataFrame:
    """Resample to daily bars.

    Args:
        df: Input DataFrame (minute bars or other high-frequency data)
        timestamp_col: Timestamp column name

    Returns:
        Daily DataFrame
    """
    required_cols = ["open", "high", "low", "close"]
    if not all(col in df.columns for col in required_cols):
        raise ValueError(f"DataFrame must contain columns: {required_cols}")

    daily = df.group_by_dynamic(
        timestamp_col,
        every="1d"
    ).agg([
        pl.col("open").first().alias("open"),
        pl.col("high").max().alias("high"),
        pl.col("low").min().alias("low"),
        pl.col("close").last().alias("close"),
        pl.col("volume").sum().alias("volume") if "volume" in df.columns else pl.lit(0).alias("volume"),
        pl.count().alias("num_bars")
    ]).sort(timestamp_col)

    return daily


def find_indicator_columns(
    df: pl.DataFrame,
    patterns: Optional[List[str]] = None
) -> List[str]:
    """Find technical-indicator columns.

    Args:
        df: Input DataFrame
        patterns: Patterns to search for (defaults to common technical indicators)

    Returns:
        List of matching column names
    """
    if patterns is None:
        patterns = ["SMA", "EMA", "ATR", "RSI", "MACD", "BB", "BOLL"]

    indicator_cols = [
        col for col in df.columns
        if any(pattern in col for pattern in patterns)
    ]

    return indicator_cols


def filter_by_date_range(
    df: pl.DataFrame,
    start_date: str,
    end_date: str,
    timestamp_col: str = "ts"
) -> pl.DataFrame:
    """Filter data by date range.

    Args:
        df: Input DataFrame
        start_date: Start date (ISO 8601 format)
        end_date: End date (ISO 8601 format)
        timestamp_col: Timestamp column name

    Returns:
        Filtered DataFrame
    """
    return df.filter(
        (pl.col(timestamp_col) >= start_date) &
        (pl.col(timestamp_col) <= end_date)
    )


def get_ohlcv_stats(df: pl.DataFrame) -> pl.DataFrame:
    """Get OHLCV summary statistics.

    Args:
        df: Input DataFrame

    Returns:
        Statistics DataFrame
    """
    ohlcv_cols = ["open", "high", "low", "close", "volume"]
    available_cols = [col for col in ohlcv_cols if col in df.columns]

    if not available_cols:
        raise ValueError("DataFrame does not contain any OHLCV columns")

    return df.select(available_cols).describe()


def detect_outliers(
    df: pl.DataFrame,
    column: str,
    n_std: float = 3.0
) -> Tuple[pl.DataFrame, int]:
    """Detect outliers using the standard-deviation method.

    Args:
        df: Input DataFrame
        column: Column to check
        n_std: Number of standard deviations (default 3.0)

    Returns:
        (DataFrame with outliers flagged, outlier count)
    """
    mean = df[column].mean()
    std = df[column].std()

    lower_bound = mean - n_std * std
    upper_bound = mean + n_std * std

    df_with_outliers = df.with_columns([
        (
            (pl.col(column) < lower_bound) |
            (pl.col(column) > upper_bound)
        ).alias(f"{column}_is_outlier")
    ])

    outlier_count = df_with_outliers.filter(pl.col(f"{column}_is_outlier")).height

    return df_with_outliers, outlier_count


def calculate_drawdown(
    df: pl.DataFrame,
    price_col: str = "close"
) -> pl.DataFrame:
    """Calculate drawdown.

    Args:
        df: Input DataFrame
        price_col: Price column name

    Returns:
        DataFrame with added cummax and drawdown columns
    """
    return df.with_columns([
        pl.col(price_col).cum_max().alias("cummax"),
        ((pl.col(price_col) / pl.col(price_col).cum_max()) - 1).alias("drawdown")
    ])


def get_max_drawdown(
    df: pl.DataFrame,
    price_col: str = "close"
) -> float:
    """Get the maximum drawdown.

    Args:
        df: Input DataFrame
        price_col: Price column name

    Returns:
        Maximum drawdown value (a negative fraction of the running peak)
    """
    df_with_dd = calculate_drawdown(df, price_col)
    return df_with_dd["drawdown"].min()


def split_train_test(
    df: pl.DataFrame,
    train_ratio: float = 0.8,
    timestamp_col: str = "ts"
) -> Tuple[pl.DataFrame, pl.DataFrame]:
    """Split into train and test sets in chronological order.

    Args:
        df: Input DataFrame
        train_ratio: Fraction of rows used for the training set
        timestamp_col: Timestamp column name

    Returns:
        (train set, test set)
    """
    df_sorted = df.sort(timestamp_col)
    split_idx = int(len(df_sorted) * train_ratio)

    train = df_sorted[:split_idx]
    test = df_sorted[split_idx:]

    return train, test


def summary_stats(df: pl.DataFrame, price_col: str = "close") -> dict:
    """Compute common summary statistics.

    Args:
        df: Input DataFrame
        price_col: Price column name

    Returns:
        Dictionary of statistics
    """
    df_with_returns = calculate_returns(df, price_col)

    stats = {
        "總行數": len(df),
        "起始價格": df[price_col].first(),
        "結束價格": df[price_col].last(),
        "最高價": df[price_col].max(),
        "最低價": df[price_col].min(),
        "平均價格": df[price_col].mean(),
        "價格標準差": df[price_col].std(),
        "總收益率": (df[price_col].last() / df[price_col].first() - 1) * 100,
    }

    if "returns" in df_with_returns.columns:
        stats.update({
            "平均日收益率": df_with_returns["returns"].mean() * 100,
            "收益率標準差": df_with_returns["returns"].std() * 100,
            "夏普比率(假設 Rf=0)": (
                df_with_returns["returns"].mean() / df_with_returns["returns"].std()
                if df_with_returns["returns"].std() != 0 else 0
            ),
        })

    # Compute maximum drawdown
    max_dd = get_max_drawdown(df, price_col)
    stats["最大回撤"] = max_dd * 100

    return stats


def print_summary(df: pl.DataFrame, price_col: str = "close"):
    """Print a data summary.

    Args:
        df: Input DataFrame
        price_col: Price column name
    """
    stats = summary_stats(df, price_col)

    print("=" * 50)
    print("數據摘要統計")
    print("=" * 50)

    for key, value in stats.items():
        if isinstance(value, float):
            if "率" in key or "撤" in key:
                print(f"{key:<20}: {value:>10.2f}%")
            else:
                print(f"{key:<20}: {value:>10.2f}")
        else:
            print(f"{key:<20}: {value:>10}")

    print("=" * 50)
tradepose_client/api/engine.py
@@ -0,0 +1,59 @@
"""Engine management API"""

from typing import Dict, Optional
import requests


class EngineAPI:
    """Engine management API mixin"""

    def clear_data(self, instrument_id: Optional[str] = None) -> Dict:
        """Clear OHLCV data.

        Args:
            instrument_id: Instrument ID to clear; None clears all data

        Returns:
            The clear-operation result

        Raises:
            requests.HTTPError: If the server responds with an error status (e.g. 500)

        Example:
            >>> # Clear a specific instrument
            >>> client.clear_data(instrument_id="TXF")
            >>>
            >>> # Clear all data
            >>> client.clear_data()
        """
        payload = {}
        if instrument_id is not None:
            payload["instrument_id"] = instrument_id

        response = requests.post(
            f"{self.api_url}/api/v1/engine/clear-data",
            json=payload,
            headers=self._get_headers(),
        )
        response.raise_for_status()
        return response.json()

    def reset_engine(self) -> Dict:
        """Reset the engine.

        Returns:
            The reset result

        Raises:
            requests.HTTPError: If the server responds with an error status (e.g. 500)

        Example:
            >>> client.reset_engine()
        """
        response = requests.post(
            f"{self.api_url}/api/v1/engine/reset",
            json={},
            headers=self._get_headers(),
        )
        response.raise_for_status()
        return response.json()
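Finally, a sketch of how the EngineAPI mixin is presumably consumed. The diff does not show client.py, so the host attributes the mixin relies on (api_url and _get_headers) are stubbed here as assumptions; the real TradeposeClient presumably supplies them itself.

from typing import Dict

from tradepose_client.api.engine import EngineAPI


class MinimalEngineClient(EngineAPI):
    """Hypothetical host class providing the attributes EngineAPI expects."""

    def __init__(self, api_url: str = "http://localhost:8080") -> None:
        self.api_url = api_url

    def _get_headers(self) -> Dict[str, str]:
        # Assumed header shape; the real client may add authentication fields.
        return {"Content-Type": "application/json"}


client = MinimalEngineClient()
client.clear_data(instrument_id="TXF")  # clear one instrument's OHLCV data
client.reset_engine()                   # full engine reset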