openfund-core 0.0.4__py3-none-any.whl → 1.0.5__py3-none-any.whl
This diff shows the contents of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
- core/Exchange.py +533 -0
- core/main.py +23 -0
- core/smc/SMCBase.py +130 -0
- core/smc/SMCFVG.py +86 -0
- core/smc/SMCLiquidity.py +7 -0
- core/smc/SMCOrderBlock.py +280 -0
- core/smc/SMCPDArray.py +75 -0
- core/smc/SMCStruct.py +296 -0
- core/smc/__init__.py +0 -0
- core/utils/OPTools.py +30 -0
- openfund_core-1.0.5.dist-info/METADATA +48 -0
- openfund_core-1.0.5.dist-info/RECORD +15 -0
- {openfund_core-0.0.4.dist-info → openfund_core-1.0.5.dist-info}/WHEEL +1 -1
- openfund_core-1.0.5.dist-info/entry_points.txt +3 -0
- openfund/core/__init__.py +0 -14
- openfund/core/api_tools/__init__.py +0 -16
- openfund/core/api_tools/binance_futures_tools.py +0 -23
- openfund/core/api_tools/binance_tools.py +0 -26
- openfund/core/api_tools/enums.py +0 -539
- openfund/core/base_collector.py +0 -72
- openfund/core/base_tool.py +0 -58
- openfund/core/factory.py +0 -97
- openfund/core/openfund_old/continuous_klines.py +0 -153
- openfund/core/openfund_old/depth.py +0 -92
- openfund/core/openfund_old/historical_trades.py +0 -123
- openfund/core/openfund_old/index_info.py +0 -67
- openfund/core/openfund_old/index_price_kline.py +0 -118
- openfund/core/openfund_old/klines.py +0 -95
- openfund/core/openfund_old/klines_qrr.py +0 -103
- openfund/core/openfund_old/mark_price.py +0 -121
- openfund/core/openfund_old/mark_price_klines.py +0 -122
- openfund/core/openfund_old/ticker_24hr_price_change.py +0 -99
- openfund/core/pyopenfund.py +0 -85
- openfund/core/services/um_futures_collector.py +0 -142
- openfund/core/sycu_exam/__init__.py +0 -1
- openfund/core/sycu_exam/exam.py +0 -19
- openfund/core/sycu_exam/random_grade_cplus.py +0 -440
- openfund/core/sycu_exam/random_grade_web.py +0 -404
- openfund/core/utils/time_tools.py +0 -25
- openfund_core-0.0.4.dist-info/LICENSE +0 -201
- openfund_core-0.0.4.dist-info/METADATA +0 -67
- openfund_core-0.0.4.dist-info/RECORD +0 -30
- {openfund/core/openfund_old → core}/__init__.py +0 -0
core/smc/SMCFVG.py
ADDED
@@ -0,0 +1,86 @@
import logging
import pandas as pd

from core.smc.SMCStruct import SMCStruct


class SMCFVG(SMCStruct):
    FVG_TOP = "fvg_top"
    FVG_BOT = "fvg_bot"
    FVG_MID = "fvg_mid"
    FVG_SIDE = "fvg_side"
    FVG_WAS_BALANCED = "fvg_was_balanced"

    def __init__(self):
        super().__init__()
        self.logger = logging.getLogger(__name__)

    def find_FVGs(
        self, struct: pd.DataFrame, side, check_balanced=True, start_index=-1
    ) -> pd.DataFrame:
        """Find Fair Value Gaps (FVGs).

        Args:
            struct (pd.DataFrame): candlestick (K-line) data
            side: trade direction, 'buy' | 'sell'
            check_balanced (bool): whether to check if an FVG has already been balanced, defaults to True
            start_index (int): index to start searching from, defaults to -1

        Returns:
            pd.DataFrame: the detected FVGs
        """
        # bug2.2.5_1: the discount zone has not been reached yet; computing an FVG needs the previous candle
        # df = data.copy().iloc[pivot_index:]
        df = (
            struct.copy()
            if start_index == -1
            else struct.copy().iloc[max(0, start_index - 1) :]
        )

        # Check that the data contains the required columns
        check_columns = [self.HIGH_COL, self.LOW_COL]
        self.check_columns(df, check_columns)

        # Detect Fair Value Gaps.
        # Use vectorized operations instead of apply for better performance.
        if side == self.BUY_SIDE:
            condition = df[self.HIGH_COL].shift(1) < df[self.LOW_COL].shift(-1)
            side_value = "Bullish"
            price_top = df[self.LOW_COL].shift(-1)
            price_bot = df[self.HIGH_COL].shift(1)
        else:
            condition = df[self.LOW_COL].shift(1) > df[self.HIGH_COL].shift(-1)
            side_value = "Bearish"
            price_top = df[self.LOW_COL].shift(1)
            price_bot = df[self.HIGH_COL].shift(-1)

        df.loc[:, self.FVG_SIDE] = pd.Series(
            [side_value if x else None for x in condition], index=df.index
        )
        df.loc[:, self.FVG_TOP] = price_top.where(condition, 0)
        df.loc[:, self.FVG_BOT] = price_bot.where(condition, 0)
        df.loc[:, self.FVG_MID] = (df[self.FVG_TOP] + df[self.FVG_BOT]) / 2

        fvg_df = df[
            df[self.FVG_SIDE] == "Bullish"
            if side == self.BUY_SIDE
            else df[self.FVG_SIDE] == "Bearish"
        ]
        fvg_df = fvg_df.copy()
        if check_balanced:
            # Check whether each FVG has already been balanced (filled)
            fvg_df.loc[:, self.FVG_WAS_BALANCED] = fvg_df.apply(
                lambda row: any(df.loc[row.name + 2 :, self.LOW_COL] <= row[self.FVG_BOT])
                if side == self.BUY_SIDE
                else any(
                    df.loc[row.name + 2 :, self.HIGH_COL] >= row[self.FVG_TOP]
                ),
                axis=1,
            )

            fvg_df = fvg_df[~fvg_df[self.FVG_WAS_BALANCED]]

        return fvg_df
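For the buy side, find_FVGs flags a gap wherever the previous candle's high sits below the next candle's low, then drops gaps that a later low has already filled when check_balanced is set. Below is a minimal usage sketch (not part of the package); the lowercase column names and the value of BUY_SIDE are assumptions about constants inherited from SMCStruct, which this diff does not include.

# Sketch only: column names ("high", "low", ...) and BUY_SIDE == "buy" are assumed,
# since SMCStruct's constants are not part of this diff.
import pandas as pd
from core.smc.SMCFVG import SMCFVG

candles = pd.DataFrame({
    "timestamp": range(6),
    "open":   [100, 101, 103, 107, 108, 109],
    "high":   [101, 102, 104, 108, 109, 110],
    "low":    [ 99, 100, 103, 106, 107, 108],
    "close":  [101, 102, 104, 107, 108, 109],
    "volume": [ 10,  12,  15,  20,  11,   9],
})

fvg = SMCFVG()
bullish = fvg.find_FVGs(candles, side=fvg.BUY_SIDE)      # rows where high[i-1] < low[i+1]
print(bullish[[fvg.FVG_TOP, fvg.FVG_BOT, fvg.FVG_MID]])  # unbalanced bullish gaps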
core/smc/SMCOrderBlock.py
ADDED
@@ -0,0 +1,280 @@
import logging
import pandas as pd

from core.smc.SMCStruct import SMCStruct


class SMCOrderBlock(SMCStruct):
    OB_HIGH_COL = "ob_high"
    OB_LOW_COL = "ob_low"
    OB_MID_COL = "ob_mid"
    OB_VOLUME_COL = "ob_volume"
    OB_DIRECTION_COL = "ob_direction"  # 1: upward break, 2: downward break
    # OB_START_INDEX_COL = "ob_start_index"
    # OB_START_TS_COL = "ob_start_ts"
    OB_ATR = "ob_atr"
    OB_IS_COMBINED = "ob_is_combined"
    OB_WAS_CROSSED = "ob_was_crossed"

    def __init__(self):
        super().__init__()
        self.logger = logging.getLogger(__name__)

    def find_OBs(
        self,
        struct: pd.DataFrame,
        side=None,
        start_index: int = -1,
        is_valid: bool = True,
        if_combine: bool = True,
    ) -> pd.DataFrame:
        """Find Order Blocks (OBs).

        Args:
            struct (pd.DataFrame): candlestick data with structure columns
            side: trade direction; if None, return all OB boxes (both bullish and bearish)
            start_index (int): index to start searching from
            is_valid (bool): only return valid OBs, i.e. ones that have not been crossed
            if_combine (bool): whether to combine overlapping OBs

        Returns:
            pd.DataFrame: the detected OBs
        """

        df = struct.copy() if start_index == -1 else struct.copy().iloc[start_index:]
        if self.OB_DIRECTION_COL not in df.columns:
            df = self.build_struct_for_ob(df)

        # Keep only rows that carry OB data
        ob_df = df[df[self.OB_DIRECTION_COL].notna()]

        # Filter by side and build the OB set
        if side is not None:
            direction = "Bullish" if side == self.BUY_SIDE else "Bearish"
            ob_df = ob_df[ob_df[self.OB_DIRECTION_COL] == direction]

        # Check whether each OB has been crossed (mitigated)
        ob_df = ob_df.copy()
        ob_df.loc[:, self.OB_WAS_CROSSED] = ob_df.apply(
            lambda row: any(
                df.loc[row.name + 1 :, self.LOW_COL] <= row[self.OB_LOW_COL]
            )
            if row[self.OB_DIRECTION_COL] == "Bullish"
            else any(df.loc[row.name + 1 :, self.HIGH_COL] >= row[self.OB_HIGH_COL]),
            axis=1,
        )

        ob_df = ob_df[~ob_df[self.OB_WAS_CROSSED]]

        if if_combine:
            # Combine overlapping OBs
            ob_df = self._combineOB(ob_df)

        return ob_df

    def build_struct_for_ob(
        self, df, is_struct_body_break=True, atr_multiplier=0.6
    ):
        """
        Build market structure and detect Order Blocks.

        Args:
            df: DataFrame of candlestick data
            is_struct_body_break: whether to use the close price to confirm a structure break
            atr_multiplier: ATR multiple used as the minimum OB range threshold

        Returns:
            The processed DataFrame, including structure and Order Block columns.
        """
        # Build the base structure first
        df = self.build_struct(df, is_struct_body_break)

        check_columns = [self.HIGH_COL, self.LOW_COL, self.CLOSE_COL]
        self.check_columns(df, check_columns)

        # Initialise the OB columns
        ob_columns = [
            self.OB_HIGH_COL,
            self.OB_LOW_COL,
            self.OB_MID_COL,
            self.OB_VOLUME_COL,
            self.OB_DIRECTION_COL,
            # self.OB_START_INDEX_COL,
            # self.OB_START_TS_COL,
            self.OB_ATR,
        ]
        for col in ob_columns:
            df[col] = None

        # Compute ATR for the threshold check
        df[self.ATR_COL] = self._calculate_atr(df)

        # Detect Order Blocks
        for i in range(1, len(df)):
            # Break of a structural high
            if df.at[i, self.STRUCT_COL] and "Bullish" in df.at[i, self.STRUCT_COL]:
                self._find_ob(df, i, atr_multiplier)

            # Break of a structural low
            elif df.at[i, self.STRUCT_COL] and "Bearish" in df.at[i, self.STRUCT_COL]:
                self._find_ob(df, i, atr_multiplier, is_bullish=False)

        return df

    def _combineOB(self, df_OBs, combine_atr_muiltiplier=0.2):
        """
        Combine overlapping OBs.
        """

        df_ob = df_OBs.copy()
        # Initialise the OB_IS_COMBINED column to 0
        df_ob[self.OB_IS_COMBINED] = 0

        combine_atr_muiltiplier = self.toDecimal(combine_atr_muiltiplier)
        # Iterate over all OBs and check whether they should be combined
        for i in range(len(df_ob)):
            # Skip the current OB if it has already been combined
            if df_ob.iloc[i][self.OB_IS_COMBINED] == 1:
                continue

            current_direction = df_ob.iloc[i][self.OB_DIRECTION_COL]
            current_mid = df_ob.iloc[i][self.OB_MID_COL]
            current_atr = df_ob.iloc[i][self.OB_ATR]

            # Check the subsequent OBs
            for j in range(i + 1, len(df_ob)):
                # Skip an OB that has already been combined
                if df_ob.iloc[j][self.OB_IS_COMBINED] == 1:
                    continue

                # Same direction and mid prices closer than the threshold: mark as combined
                if (
                    df_ob.iloc[j][self.OB_DIRECTION_COL] == current_direction
                    and abs(df_ob.iloc[j][self.OB_MID_COL] - current_mid)
                    < current_atr * combine_atr_muiltiplier
                ):
                    df_ob.iloc[i, df_ob.columns.get_loc(self.OB_IS_COMBINED)] = 1
                    break

        return df_ob

    def _calculate_atr(self, df, period=200, multiplier=1):
        return super().calculate_atr(df, period, multiplier)

    def _find_ob(self, df, i, atr_multiplier, is_bullish=True):
        """Locate the Order Block for the structure break at index i.

        Args:
            df: DataFrame of candlestick data
            i: index of the current (breaking) candle
            atr_multiplier: ATR multiple used as the minimum OB range threshold
            is_bullish: True for a bullish OB, False for a bearish OB
        """
        # Pick the structural index and price for the given direction; the OB is the candle
        # at the start of the structure that carries its highest high or lowest low.
        if is_bullish:
            index = df.loc[i, self.STRUCT_LOW_INDEX_COL]
            extreme_price = df.loc[i, self.STRUCT_LOW_COL]

            # Oper_func = min
            src = self.toDecimal(df.loc[index, self.HIGH_COL])
            direction = "Bullish"

        else:
            index = df.loc[i, self.STRUCT_HIGH_INDEX_COL]
            extreme_price = df.loc[i, self.STRUCT_HIGH_COL]
            # Oper_func = max
            src = self.toDecimal(df.loc[index, self.LOW_COL])
            direction = "Bearish"

        # Accumulated volume across the OB range
        vol = df.loc[index:i, self.VOLUME_COL].sum()

        # Apply the ATR threshold: if the OB range is smaller than 60% of the ATR
        # (e.g. ATR=20, 20*0.6=12), expand the OB to the full ATR range.
        precision = self.get_precision_length(extreme_price)
        atr = self.toDecimal(df.loc[i, self.ATR_COL])
        atr_multiplier = self.toDecimal(atr_multiplier)
        if is_bullish:
            # Current range size
            current_range = src - extreme_price
            target_range = atr
            if current_range < atr * atr_multiplier:
                # Range is too small: expand it to the target size, split evenly around the centre
                extend_amount = (target_range - current_range) / 2
                src += extend_amount
                extreme_price -= extend_amount

            high, low = (
                self.toDecimal(src, precision),
                self.toDecimal(extreme_price, precision),
            )
        else:
            # Current range size
            current_range = extreme_price - src
            target_range = atr
            if current_range < atr * atr_multiplier:
                # Range is too small: expand it to the target size, split evenly around the centre
                extend_amount = (target_range - current_range) / 2
                src -= extend_amount
                extreme_price += extend_amount

            high, low = (
                self.toDecimal(extreme_price, precision),
                self.toDecimal(src, precision),
            )

        # Midpoint of the OB
        mid = (high + low) / 2

        # Write the OB information back to the DataFrame
        df.at[index, self.OB_HIGH_COL] = high
        df.at[index, self.OB_LOW_COL] = low
        df.at[index, self.OB_MID_COL] = mid
        df.at[index, self.OB_VOLUME_COL] = vol
        df.at[index, self.OB_DIRECTION_COL] = direction
        # df.at[i, self.OB_START_INDEX_COL] = index
        # df.at[i, self.OB_START_TS_COL] = df.loc[index, self.TIMESTAMP_COL]
        df.at[index, self.OB_ATR] = atr

    def get_lastest_OB(self, data, trend, start_index=-1):
        """
        Get the most recent Order Block.

        Args:
            data: DataFrame of candlestick data with OB columns
            trend: trend direction, "Bullish" or "Bearish"

        Returns:
            The most recent Order Block information, or None.
        """
        # Restrict to the requested range
        df = (
            data.copy()
            if start_index == -1
            else data.copy().iloc[start_index:]
        )

        # Check that the data contains the required columns
        check_columns = [self.OB_DIRECTION_COL]
        self.check_columns(df, check_columns)

        # Filter to OBs matching the trend within the range
        mask = df[self.OB_DIRECTION_COL] == trend
        valid_obs = df[mask]

        if not valid_obs.empty:
            # Most recent OB
            last_ob = valid_obs.iloc[-1]
            return {
                self.OB_HIGH_COL: last_ob[self.OB_HIGH_COL],
                self.OB_LOW_COL: last_ob[self.OB_LOW_COL],
                self.OB_MID_COL: last_ob[self.OB_MID_COL],
                self.OB_VOLUME_COL: last_ob[self.OB_VOLUME_COL],
                self.OB_DIRECTION_COL: last_ob[self.OB_DIRECTION_COL],
                self.OB_ATR: last_ob[self.OB_ATR],
                self.OB_WAS_CROSSED: last_ob[self.OB_WAS_CROSSED],
            }

        return None
core/smc/SMCPDArray.py
ADDED
@@ -0,0 +1,75 @@
import logging
import pandas as pd

from core.smc.SMCFVG import SMCFVG
from core.smc.SMCOrderBlock import SMCOrderBlock


class SMCPDArray(SMCFVG, SMCOrderBlock):
    PD_HIGH_COL = "pd_high"
    PD_LOW_COL = "pd_low"
    PD_MID_COL = "pd_mid"
    PD_TYPE_COL = "pd_type"

    def __init__(self):
        super().__init__()
        self.logger = logging.getLogger(__name__)

    def find_PDArrays(
        self, struct: pd.DataFrame, side, start_index=-1
    ) -> pd.DataFrame:
        """Find PD Arrays: Fair Value Gaps (FVG), Order Blocks (OB),
        Breaker Blocks (BB) and Mitigation Blocks (MB).

        Args:
            struct (pd.DataFrame): candlestick data with structure columns
            side: trade direction, 'buy' | 'sell'
            start_index (int): index to start searching from, defaults to -1

        Returns:
            pd.DataFrame: the detected PD Arrays
        """

        df = (
            struct.copy()
            if start_index == -1
            else struct.copy().iloc[max(0, start_index - 1) :]
        )

        df_FVGs = self.find_FVGs(df, side)
        # self.logger.info(f"fvgs:\n{df_FVGs[['timestamp', self.FVG_SIDE, self.FVG_TOP, self.FVG_BOT, self.FVG_WAS_BALANCED]]}")

        df_OBs = self.find_OBs(df, side)
        # self.logger.info("find_OBs:\n %s", df_OBs)

        # Rename the timestamp columns so the two frames can be merged
        timestamp_mapping = {self.TIMESTAMP_COL: ['ts_OBs', 'ts_FVGs']}
        df_OBs = df_OBs.rename(columns={self.TIMESTAMP_COL: timestamp_mapping[self.TIMESTAMP_COL][0]})
        df_FVGs = df_FVGs.rename(columns={self.TIMESTAMP_COL: timestamp_mapping[self.TIMESTAMP_COL][1]})

        # Merge the two DataFrames on their index
        df_PDArrays = pd.concat(
            [df_OBs, df_FVGs],
            axis=1,
            join='outer'
        ).sort_index()

        # Restore a single timestamp column
        df_PDArrays[self.TIMESTAMP_COL] = df_PDArrays[timestamp_mapping[self.TIMESTAMP_COL][0]].fillna(
            df_PDArrays[timestamp_mapping[self.TIMESTAMP_COL][1]]
        )
        # Classify each row as FVG, OB, or an overlapping FVG-OB
        df_PDArrays[self.PD_TYPE_COL] = df_PDArrays[[self.FVG_SIDE, self.OB_DIRECTION_COL]].apply(
            lambda x: 'FVG-OB' if pd.notna(x.iloc[0]) and pd.notna(x.iloc[1]) else 'FVG' if pd.notna(x.iloc[0]) else 'OB', axis=1
        )

        df_PDArrays.loc[:, self.PD_HIGH_COL] = df_PDArrays[[self.FVG_TOP, self.OB_HIGH_COL]].max(axis=1)
        df_PDArrays.loc[:, self.PD_LOW_COL] = df_PDArrays[[self.FVG_BOT, self.OB_LOW_COL]].min(axis=1)
        df_PDArrays.loc[:, self.PD_MID_COL] = (df_PDArrays[self.PD_HIGH_COL] + df_PDArrays[self.PD_LOW_COL]) / 2

        return df_PDArrays
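SMCPDArray inherits from both SMCFVG and SMCOrderBlock (which share SMCStruct as a base), so a single instance exposes find_FVGs, find_OBs, and the merged find_PDArrays view, where pd_type marks each row as 'FVG', 'OB', or 'FVG-OB' when the two overlap on the same candle. A hedged usage sketch, under the same assumptions as above (hypothetical fetch_candles; constants such as SELL_SIDE assumed to be defined on SMCStruct):

# Sketch only: fetch_candles is hypothetical, and SELL_SIDE is assumed to be a
# constant defined on SMCStruct alongside BUY_SIDE.
from core.smc.SMCPDArray import SMCPDArray

pda = SMCPDArray()
candles = fetch_candles("ETH/USDT", timeframe="1h")    # hypothetical OHLCV DataFrame

arrays = pda.find_PDArrays(candles, side=pda.SELL_SIDE)
print(arrays[[pda.PD_TYPE_COL, pda.PD_HIGH_COL, pda.PD_LOW_COL, pda.PD_MID_COL]].tail())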