siglab-py 0.1.19__py3-none-any.whl → 0.6.33__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- siglab_py/algo/__init__.py +0 -0
- siglab_py/algo/macdrsi_crosses_15m_tc_strategy.py +107 -0
- siglab_py/algo/strategy_base.py +122 -0
- siglab_py/algo/strategy_executor.py +1308 -0
- siglab_py/algo/tp_algo.py +529 -0
- siglab_py/backtests/__init__.py +0 -0
- siglab_py/backtests/backtest_core.py +2405 -0
- siglab_py/backtests/coinflip_15m_crypto.py +432 -0
- siglab_py/backtests/fibonacci_d_mv_crypto.py +541 -0
- siglab_py/backtests/macdrsi_crosses_15m_tc_crypto.py +473 -0
- siglab_py/constants.py +26 -1
- siglab_py/exchanges/binance.py +38 -0
- siglab_py/exchanges/deribit.py +83 -0
- siglab_py/exchanges/futubull.py +33 -3
- siglab_py/market_data_providers/candles_provider.py +11 -10
- siglab_py/market_data_providers/candles_ta_provider.py +5 -5
- siglab_py/market_data_providers/ccxt_candles_ta_to_csv.py +238 -0
- siglab_py/market_data_providers/futu_candles_ta_to_csv.py +224 -0
- siglab_py/market_data_providers/google_monitor.py +320 -0
- siglab_py/market_data_providers/orderbooks_provider.py +15 -12
- siglab_py/market_data_providers/tg_monitor.py +428 -0
- siglab_py/market_data_providers/{test_provider.py → trigger_provider.py} +9 -8
- siglab_py/ordergateway/client.py +172 -41
- siglab_py/ordergateway/encrypt_keys_util.py +1 -1
- siglab_py/ordergateway/gateway.py +456 -344
- siglab_py/ordergateway/test_ordergateway.py +8 -7
- siglab_py/tests/integration/market_data_util_tests.py +80 -6
- siglab_py/tests/unit/analytic_util_tests.py +67 -4
- siglab_py/tests/unit/market_data_util_tests.py +96 -0
- siglab_py/tests/unit/simple_math_tests.py +252 -0
- siglab_py/tests/unit/trading_util_tests.py +65 -0
- siglab_py/util/analytic_util.py +484 -66
- siglab_py/util/datetime_util.py +39 -0
- siglab_py/util/market_data_util.py +564 -74
- siglab_py/util/module_util.py +40 -0
- siglab_py/util/notification_util.py +78 -0
- siglab_py/util/retry_util.py +16 -3
- siglab_py/util/simple_math.py +262 -0
- siglab_py/util/slack_notification_util.py +59 -0
- siglab_py/util/trading_util.py +118 -0
- {siglab_py-0.1.19.dist-info → siglab_py-0.6.33.dist-info}/METADATA +5 -13
- siglab_py-0.6.33.dist-info/RECORD +56 -0
- {siglab_py-0.1.19.dist-info → siglab_py-0.6.33.dist-info}/WHEEL +1 -1
- siglab_py-0.1.19.dist-info/RECORD +0 -31
- {siglab_py-0.1.19.dist-info → siglab_py-0.6.33.dist-info}/top_level.txt +0 -0
siglab_py/util/analytic_util.py
CHANGED
```diff
@@ -1,6 +1,7 @@
 import tzlocal
 from datetime import datetime, timezone
 from typing import List, Dict, Union, NoReturn, Any, Tuple
+from enum import Enum
 from pathlib import Path
 import math
 import pandas as pd
```
```diff
@@ -10,7 +11,47 @@ from hurst import compute_Hc # compatible with pypy
 from ccxt.base.exchange import Exchange as CcxtExchange
 from ccxt import deribit
 
-from util.
+from siglab_py.util.simple_math import bucket_series, bucketize_val
+from siglab_py.util.market_data_util import fix_column_types
+from siglab_py.constants import TrendDirection
+
+def classify_candle(
+    candle : pd.Series,
+    min_candle_height_ratio : float = 5,
+    distance_from_mid_doji_threshold_bps : float = 10
+) -> Union[str, None]:
+    candle_class : Union[str, None] = None
+    open = candle['open']
+    high = candle['high']
+    low = candle['low']
+    close = candle['close']
+    candle_full_height = high - low # always positive
+    candle_body_height = close - open # can be negative
+    candle_full_mid = (high + low)/2
+    candle_body_mid = (open + close)/2
+    distance_from_mid_bps = (candle_full_mid/candle_body_mid -1)*10000 if candle_full_mid>candle_body_mid else (candle_body_mid/candle_full_mid -1)*10000
+
+    candle_height_ratio = candle_full_height / abs(candle_body_height) if candle_body_height!=0 else float('inf')
+
+    if (
+        candle_height_ratio>=min_candle_height_ratio
+        and close>low
+    ):
+        candle_class = 'hammer'
+    elif (
+        candle_height_ratio>=min_candle_height_ratio
+        and close<high
+    ):
+        candle_class = 'shooting_star'
+    elif(
+        candle_height_ratio>=min_candle_height_ratio
+        and distance_from_mid_bps<=distance_from_mid_doji_threshold_bps
+    ):
+        candle_class = 'doji'
+
+    # Keep add more ...
+
+    return candle_class
 
 # Fibonacci
 MAGIC_FIB_LEVELS = [0, 0.236, 0.382, 0.5, 0.618, 0.786, 1.00, 1.618, 2.618, 3.618, 4.236]
```
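For orientation, a minimal usage sketch of the new `classify_candle` helper. This is not part of the package diff; the import path is assumed to mirror the file layout (`siglab_py/util/analytic_util.py`), and the OHLC values are made up.

```python
import pandas as pd
from siglab_py.util.analytic_util import classify_candle  # assumed import path

# A candle with a long lower wick and a small body near the high:
# full range = 11, body = 0.5, so the height ratio clears the default threshold of 5.
candle = pd.Series({'open': 100.0, 'high': 101.0, 'low': 90.0, 'close': 100.5})

label = classify_candle(candle, min_candle_height_ratio=5, distance_from_mid_doji_threshold_bps=10)
print(label)  # with these values the first branch in the diff above matches: 'hammer'
```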
```diff
@@ -32,17 +73,64 @@ def estimate_fib_retracement(
 
     return retracement_price
 
+def calculate_slope(
+    pd_data : pd.DataFrame,
+    src_col_name : str,
+    slope_col_name : str,
+    sliding_window_how_many_candles : int
+):
+    import statsmodels.api as sm # in-compatible with pypy
+
+    X = sm.add_constant(range(len(pd_data[src_col_name])))
+    rolling_slope = pd_data[src_col_name].rolling(window=sliding_window_how_many_candles).apply(lambda x: sm.OLS(x, X[:len(x)]).fit().params[1], raw=False)
+    pd_data[slope_col_name] = rolling_slope
+    max_abs_slope = pd_data[slope_col_name].abs().rolling(window=sliding_window_how_many_candles).max()
+    pd_data[f"normalized_{slope_col_name}"] = pd_data[slope_col_name] / max_abs_slope
+    normalized_slope_rolling = pd_data[f"normalized_{slope_col_name}"].rolling(window=sliding_window_how_many_candles)
+    pd_data[f"normalized_{slope_col_name}_min"] = normalized_slope_rolling.min()
+    pd_data[f"normalized_{slope_col_name}_max"] = normalized_slope_rolling.max()
+    pd_data[f"normalized_{slope_col_name}_idmin"] = normalized_slope_rolling.apply(lambda x : x.idxmin())
+    pd_data[f"normalized_{slope_col_name}_idmax"] = normalized_slope_rolling.apply(lambda x : x.idxmax())
+
+def trend_from_highs(series: np.ndarray) -> float:
+    valid_series = series[~np.isnan(series)]
+    unique_maxima = valid_series[np.concatenate(([True], np.diff(valid_series) != 0))]
+    if len(unique_maxima) < 2:
+        return TrendDirection.UNDEFINED.value
+    first, last = unique_maxima[0], unique_maxima[-1]
+    if first > last:
+        return TrendDirection.LOWER_HIGHS.value
+    elif first < last:
+        return TrendDirection.HIGHER_HIGHS.value
+    else:
+        return TrendDirection.SIDEWAYS.value
+
+def trend_from_lows(series: np.ndarray) -> float:
+    valid_series = series[~np.isnan(series)]
+    unique_minima = valid_series[np.concatenate(([True], np.diff(valid_series) != 0))]
+    if len(unique_minima) < 2:
+        return TrendDirection.UNDEFINED.value
+    first, last = unique_minima[0], unique_minima[-1]
+    if first > last:
+        return TrendDirection.LOWER_LOWS.value
+    elif first < last:
+        return TrendDirection.HIGHER_LOWS.value
+    else:
+        return TrendDirection.SIDEWAYS.value
+
+
 '''
 compute_candles_stats will calculate typical/basic technical indicators using in many trading strategies:
     a. Basic SMA/EMAs (And slopes)
-    b.
-    c.
-    d.
-    e.
-    f.
-    g.
-    h.
-    i.
+    b. EMA crosses
+    c. ATR
+    d. Boillenger bands (Yes incorrect spelling sorry)
+    e. FVG
+    f. Hurst Exponent
+    g. RSI, MFI
+    h. MACD
+    i. Fibonacci
+    j. Inflections points: where 'close' crosses EMA from above or below.
 
 Parameters:
     a. boillenger_std_multiples: For boillenger upper and lower calc
```
```diff
@@ -64,12 +152,21 @@ def compute_candles_stats(
     rsi_ema : bool = True,
     boillenger_ema : bool = False,
     slow_fast_interval_ratio : float = 3,
-    rsi_sliding_window_how_many_candles :
+    rsi_sliding_window_how_many_candles : int = 14, # RSI standard 14
+    rsi_trend_sliding_window_how_many_candles : int = 24*7, # This is for purpose of RSI trend identification (Locating local peaks/troughs in RSI). This should typically be multiples of 'rsi_sliding_window_how_many_candles'.
     hurst_exp_window_how_many_candles : Union[int, None] = None, # Hurst exp standard 100-200
     boillenger_std_multiples_for_aggressive_moves_detect : int = 3, # Aggressive moves if candle low/high breaches boillenger bands from 3 standard deviations.
+    target_fib_level : float = 0.618,
     pypy_compat : bool = True
 ):
+    BUCKETS_m0_100 = bucket_series(
+        values=list([i for i in range(0,100)]),
+        outlier_threshold_percent=10,
+        level_granularity=0.1
+    )
+
     pd_candles['candle_height'] = pd_candles['high'] - pd_candles['low']
+    pd_candles['candle_body_height'] = pd_candles['close'] - pd_candles['open']
 
     '''
     market_data_gizmo inserted dummy lines --> Need exclude those or "TypeError: unorderable types for comparison": pd_btc_candles = pd_btc_candles[pd_btc_candles.close.notnull()]
```
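Throughout the rest of the function, "short periods" and "long periods" columns are derived from one long window divided by `slow_fast_interval_ratio`. A trivial sketch of that arithmetic (editorial addition; `sliding_window_how_many_candles` is referenced in the hunks below but its default is not shown in this diff, so the value here is illustrative):

```python
# How the short/long window pair used by the *_short_periods / *_long_periods columns is derived.
sliding_window_how_many_candles = 24   # illustrative long window
slow_fast_interval_ratio = 3           # default per the signature above

short_window = int(sliding_window_how_many_candles / slow_fast_interval_ratio)  # 8 candles
long_window = sliding_window_how_many_candles                                   # 24 candles
print(short_window, long_window)
```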
```diff
@@ -86,27 +183,145 @@ def compute_candles_stats(
 
     pd_candles['is_green'] = pd_candles['close'] >= pd_candles['open']
 
+    pd_candles['candle_class'] = pd_candles.apply(lambda row: classify_candle(row), axis=1) # type: ignore
+
+    close_short_periods_rolling = pd_candles['close'].rolling(window=int(sliding_window_how_many_candles/slow_fast_interval_ratio))
+    close_long_periods_rolling = pd_candles['close'].rolling(window=sliding_window_how_many_candles)
+    close_short_periods_ewm = pd_candles['close'].ewm(span=int(sliding_window_how_many_candles/slow_fast_interval_ratio), adjust=False)
+    close_long_periods_ewm = pd_candles['close'].ewm(span=sliding_window_how_many_candles, adjust=False)
+
     pd_candles['pct_change_close'] = pd_candles['close'].pct_change() * 100
-    pd_candles['sma_short_periods'] =
-    pd_candles['sma_long_periods'] =
-    pd_candles['ema_short_periods'] =
-    pd_candles['ema_long_periods'] =
+    pd_candles['sma_short_periods'] = close_short_periods_rolling.mean()
+    pd_candles['sma_long_periods'] = close_long_periods_rolling.mean()
+    pd_candles['ema_short_periods'] = close_short_periods_ewm.mean()
+    pd_candles['ema_long_periods'] = close_long_periods_ewm.mean()
     pd_candles['ema_close'] = pd_candles['ema_long_periods'] # Alias, shorter name
-    pd_candles['std'] =
+    pd_candles['std'] = close_long_periods_rolling.std()
+    pd_candles['std_percent'] = pd_candles['std'] / pd_candles['ema_close'] * 100
+
+    pd_candles['vwap_short_periods'] = (pd_candles['close'] * pd_candles['volume']).rolling(window=int(sliding_window_how_many_candles/slow_fast_interval_ratio)).sum() / pd_candles['volume'].rolling(window=int(sliding_window_how_many_candles/slow_fast_interval_ratio)).sum()
+    pd_candles['vwap_long_periods'] = (pd_candles['close'] * pd_candles['volume']).rolling(window=sliding_window_how_many_candles).sum() / pd_candles['volume'].rolling(window=sliding_window_how_many_candles).sum()
+
+    pd_candles['candle_height_percent'] = pd_candles['candle_height'] / pd_candles['ema_close'] * 100
+    pd_candles['candle_height_percent_rounded'] = pd_candles['candle_height_percent'].round().astype('Int64')
+
+    pd_candles['candle_body_height_percent'] = pd_candles['candle_body_height'] / pd_candles['ema_close'] * 100
+    pd_candles['candle_body_height_percent_rounded'] = pd_candles['candle_body_height_percent'].round().astype('Int64')
+
+    '''
+    To annualize volatility:
+        if candle_interval == '1m':
+            annualization_factor = np.sqrt(365 * 24 * 60) # 1-minute candles
+        elif candle_interval == '1h':
+            annualization_factor = np.sqrt(365 * 24) # 1-hour candles
+        elif candle_interval == '1d':
+            annualization_factor = np.sqrt(365) # 1-day candles
+        pd_candles['annualized_volatility'] = (
+            pd_candles['interval_historical_volatility'] * annualization_factor
+        )
+
+    Why log return? Trading Dude https://python.plainenglish.io/stop-using-percentage-returns-logarithmic-returns-explained-with-code-64a4634b883a
+    '''
+    pd_candles['log_return'] = np.log(pd_candles['close'] / pd_candles['close'].shift(1))
+    pd_candles['interval_hist_vol'] = pd_candles['log_return'].rolling(window=sliding_window_how_many_candles).std()
+
+    time_gap_sec = int(pd_candles['timestamp_ms'].iloc[1] - pd_candles['timestamp_ms'].iloc[0])/1000
+    seconds_in_year = 365 * 24 * 60 * 60
+    candles_per_year = seconds_in_year / time_gap_sec
+    annualization_factor = np.sqrt(candles_per_year)
+    pd_candles['annualized_hist_vol'] = pd_candles['interval_hist_vol'] * annualization_factor
+
+    pd_candles['chop_against_ema'] = (
+        (~pd_candles['is_green'] & (pd_candles['close'] > pd_candles['ema_close'])) | # Case 1: Green candle and close > EMA
+        (pd_candles['is_green'] & (pd_candles['close'] < pd_candles['ema_close']))    # Case 2: Red candle and close < EMA
+    )
 
     pd_candles['ema_volume_short_periods'] = pd_candles['volume'].ewm(span=sliding_window_how_many_candles/slow_fast_interval_ratio, adjust=False).mean()
     pd_candles['ema_volume_long_periods'] = pd_candles['volume'].ewm(span=sliding_window_how_many_candles, adjust=False).mean()
 
-    pd_candles['
-    pd_candles['
-    pd_candles['
-    pd_candles['
+    pd_candles['ema_cross'] = None
+    pd_candles['ema_cross_last'] = None
+    pd_candles['ema_bullish_cross_last_id'] = None
+    pd_candles['ema_bearish_cross_last_id'] = None
+    ema_short_periods_prev = pd_candles['ema_short_periods'].shift(1)
+    ema_long_periods_prev = pd_candles['ema_long_periods'].shift(1)
+    ema_short_periods_curr = pd_candles['ema_short_periods']
+    ema_long_periods_curr = pd_candles['ema_long_periods']
+    bullish_ema_crosses = (ema_short_periods_prev <= ema_long_periods_prev) & (ema_short_periods_curr > ema_long_periods_curr)
+    bearish_ema_crosses = (ema_short_periods_prev >= ema_long_periods_prev) & (ema_short_periods_curr < ema_long_periods_curr)
+    pd_candles.loc[bullish_ema_crosses, 'ema_cross'] = 1
+    pd_candles.loc[bearish_ema_crosses, 'ema_cross'] = -1
+    bullish_indices = pd.Series(pd_candles.index.where(pd_candles['ema_cross'] == 1), index=pd_candles.index).astype('Int64')
+    bearish_indices = pd.Series(pd_candles.index.where(pd_candles['ema_cross'] == -1), index=pd_candles.index).astype('Int64')
+    pd_candles['ema_bullish_cross_last_id'] = bullish_indices.rolling(window=pd_candles.shape[0], min_periods=1).max().astype('Int64')
+    pd_candles['ema_bearish_cross_last_id'] = bearish_indices.rolling(window=pd_candles.shape[0], min_periods=1).max().astype('Int64')
+    conditions = [
+        (pd_candles['ema_bullish_cross_last_id'].notna() &
+         pd_candles['ema_bearish_cross_last_id'].notna() &
+         (pd_candles['ema_bullish_cross_last_id'] > pd_candles['ema_bearish_cross_last_id'])),
+
+        (pd_candles['ema_bullish_cross_last_id'].notna() &
+         pd_candles['ema_bearish_cross_last_id'].notna() &
+         (pd_candles['ema_bearish_cross_last_id'] > pd_candles['ema_bullish_cross_last_id'])),
+
+        (pd_candles['ema_bullish_cross_last_id'].notna() &
+         pd_candles['ema_bearish_cross_last_id'].isna()),
+
+        (pd_candles['ema_bearish_cross_last_id'].notna() &
+         pd_candles['ema_bullish_cross_last_id'].isna())
+    ]
+    choices = ['bullish', 'bearish', 'bullish', 'bearish']
+    pd_candles['ema_cross_last'] = np.select(conditions, choices, default=None) # type: ignore
+    pd_candles.loc[bullish_ema_crosses, 'ema_cross'] = 'bullish'
+    pd_candles.loc[bearish_ema_crosses, 'ema_cross'] = 'bearish'
+
+    pd_candles['max_short_periods'] = close_short_periods_rolling.max()
+    pd_candles['max_long_periods'] = close_long_periods_rolling.max()
+    pd_candles['idmax_short_periods'] = close_short_periods_rolling.apply(lambda x : x.idxmax())
+    pd_candles['idmax_long_periods'] = close_long_periods_rolling.apply(lambda x : x.idxmax())
+
+    pd_candles['min_short_periods'] = close_short_periods_rolling.min()
+    pd_candles['min_long_periods'] = close_long_periods_rolling.min()
+    pd_candles['idmin_short_periods'] = close_short_periods_rolling.apply(lambda x : x.idxmin())
+    pd_candles['idmin_long_periods'] = close_long_periods_rolling.apply(lambda x : x.idxmin())
+
+    pd_candles['max_candle_body_height_percent_long_periods'] = pd_candles['candle_body_height_percent'].rolling(window=sliding_window_how_many_candles).max()
+    pd_candles['idmax_candle_body_height_percent_long_periods'] = pd_candles['candle_body_height_percent'].rolling(window=sliding_window_how_many_candles).apply(lambda x : x.idxmax())
+    pd_candles['min_candle_body_height_percent_long_periods'] = pd_candles['candle_body_height_percent'].rolling(window=sliding_window_how_many_candles).min()
+    pd_candles['idmin_candle_body_height_percent_long_periods'] = pd_candles['candle_body_height_percent'].rolling(window=sliding_window_how_many_candles).apply(lambda x : x.idxmin())
+
+    pd_candles['price_swing_short_periods'] = np.where(
+        pd_candles['idmax_short_periods'] > pd_candles['idmin_short_periods'],
+        pd_candles['max_short_periods'] - pd_candles['min_short_periods'], # Up swing
+        pd_candles['min_short_periods'] - pd_candles['max_short_periods']  # Down swing (negative)
+    )
 
-    pd_candles['
-
-
-
+    pd_candles['price_swing_long_periods'] = np.where(
+        pd_candles['idmax_long_periods'] > pd_candles['idmin_long_periods'],
+        pd_candles['max_long_periods'] - pd_candles['min_long_periods'], # Up swing
+        pd_candles['min_long_periods'] - pd_candles['max_long_periods']  # Down swing (negative)
+    )
 
+    pd_candles['trend_from_highs_long_periods'] = np.where(
+        pd.isna(pd_candles['max_long_periods']),
+        None, # type: ignore
+        pd_candles['max_long_periods'].rolling(window=sliding_window_how_many_candles).apply(trend_from_highs, raw=True)
+    )
+    pd_candles['trend_from_lows_long_periods'] = np.where(
+        pd.isna(pd_candles['min_long_periods']),
+        None, # type: ignore
+        pd_candles['min_long_periods'].rolling(window=sliding_window_how_many_candles).apply(trend_from_lows, raw=True)
+    )
+    pd_candles['trend_from_highs_short_periods'] = np.where(
+        pd.isna(pd_candles['max_short_periods']),
+        None, # type: ignore
+        pd_candles['max_short_periods'].rolling(window=int(sliding_window_how_many_candles/slow_fast_interval_ratio)).apply(trend_from_highs, raw=True)
+    )
+    pd_candles['trend_from_lows_short_periods'] = np.where(
+        pd.isna(pd_candles['min_short_periods']),
+        None, # type: ignore
+        pd_candles['min_short_periods'].rolling(window=int(sliding_window_how_many_candles/slow_fast_interval_ratio)).apply(trend_from_lows, raw=True)
+    )
 
     # ATR https://medium.com/codex/detecting-ranging-and-trending-markets-with-choppiness-index-in-python-1942e6450b58
     pd_candles.loc[:,'h_l'] = pd_candles['high'] - pd_candles['low']
```
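The EMA-cross bookkeeping above is fully vectorised: a bullish cross is the fast series moving from at-or-below to above the slow series, a bearish cross the reverse. A self-contained sketch of that detection on toy data (editorial addition, not package code):

```python
import pandas as pd

# Toy series: fast EMA crosses up through the slow EMA at index 3, back down at index 6.
fast = pd.Series([1.0, 1.5, 1.9, 2.2, 2.5, 2.4, 1.8])
slow = pd.Series([2.0, 2.0, 2.0, 2.0, 2.0, 2.1, 2.1])

fast_prev, slow_prev = fast.shift(1), slow.shift(1)
bullish = (fast_prev <= slow_prev) & (fast > slow)   # below/equal before, above now
bearish = (fast_prev >= slow_prev) & (fast < slow)   # above/equal before, below now

print(bullish[bullish].index.tolist())  # [3]
print(bearish[bearish].index.tolist())  # [6]
```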
```diff
@@ -114,7 +329,9 @@ def compute_candles_stats(
     pd_candles.loc[:,'l_pc'] = abs(pd_candles['low'] - pd_candles['close'].shift(1))
     pd_candles.loc[:,'tr'] = pd_candles[['h_l', 'h_pc', 'l_pc']].max(axis=1)
     pd_candles.loc[:,'atr'] = pd_candles['tr'].rolling(window=sliding_window_how_many_candles).mean()
-
+    pd_candles.loc[:,'atr_avg_short_periods'] = pd_candles['atr'].rolling(window=int(sliding_window_how_many_candles/slow_fast_interval_ratio)).mean()
+    pd_candles.loc[:,'atr_avg_long_periods'] = pd_candles['atr'].rolling(window=sliding_window_how_many_candles).mean()
+
 
     '''
     @hardcode @todo
```
```diff
@@ -125,10 +342,10 @@ def compute_candles_stats(
     Sometimes you may encounter "Exception has occurred: FloatingPointError invalid value encountered in scalar divide"
     And for example adjusting window size from 120 to 125 will resolve the issue.
     '''
+    if not hurst_exp_window_how_many_candles:
+        hurst_exp_window_how_many_candles = (sliding_window_how_many_candles if sliding_window_how_many_candles>=125 else 125)
     pd_candles['hurst_exp'] = pd_candles['close'].rolling(
-        window=
-            hurst_exp_window_how_many_candles if hurst_exp_window_how_many_candles else (sliding_window_how_many_candles if sliding_window_how_many_candles>=125 else 125)
-        )
+        window=hurst_exp_window_how_many_candles
     ).apply(lambda x: compute_Hc(x, kind='price', simplified=True)[0])
 
 
```
```diff
@@ -158,14 +375,14 @@ def compute_candles_stats(
                 first_breach_index = aggressive_mask.idxmax()
                 candle_high = pd_candles.at[first_breach_index, 'high']
                 candle_low = pd_candles.at[first_breach_index, 'low']
-                candle_height = candle_high - candle_low
+                candle_height = candle_high - candle_low # type: ignore
         else:
             aggressive_mask = window['close'] <= window['boillenger_lower_agg']
             if aggressive_mask.any():
                 first_breach_index = aggressive_mask.idxmax()
                 candle_high = pd_candles.at[first_breach_index, 'high']
                 candle_low = pd_candles.at[first_breach_index, 'low']
-                candle_height = candle_high - candle_low
+                candle_height = candle_high - candle_low # type: ignore
 
         return {
             'aggressive_move': aggressive_mask.any(),
```
```diff
@@ -270,10 +487,13 @@ def compute_candles_stats(
             mitigated = pd_candles.iloc[idx + 1:row.name]['close'].lt(row['fvg_high']).any()
         return mitigated
 
-    pd_candles['fvg_mitigated'] = pd_candles.apply(lambda row: compute_fvg_mitigated(row, pd_candles), axis=1)
-
+    pd_candles['fvg_mitigated'] = pd_candles.apply(lambda row: compute_fvg_mitigated(row, pd_candles), axis=1) # type: ignore
 
-
+    '''
+    RSI
+        Divergences from Bybit Learn https://www.youtube.com/watch?v=G9oUTi-PI18&t=809s
+        RSI Reversals from BK Traders https://www.youtube.com/watch?v=MvkbrHjiQlI
+    '''
     pd_candles.loc[:,'close_delta'] = pd_candles['close'].diff()
     pd_candles.loc[:,'close_delta_percent'] = pd_candles['close'].pct_change()
     lo_up = pd_candles['close_delta'].clip(lower=0)
```
```diff
@@ -284,9 +504,9 @@ def compute_candles_stats(
     if rsi_ema == True:
         # Use exponential moving average
         lo_ma_up = lo_up.ewm(
-            com =
+            com = rsi_sliding_window_how_many_candles -1,
             adjust=True,
-            min_periods = rsi_sliding_window_how_many_candles
+            min_periods = rsi_sliding_window_how_many_candles).mean()
         lo_ma_down = lo_down.ewm(
             com = (rsi_sliding_window_how_many_candles if rsi_sliding_window_how_many_candles else sliding_window_how_many_candles) - 1,
             adjust=True,
```
```diff
@@ -294,15 +514,57 @@ def compute_candles_stats(
 
     else:
         # Use simple moving average
-        lo_ma_up = lo_up.rolling(window = rsi_sliding_window_how_many_candles
-        lo_ma_down = lo_down.rolling(window = rsi_sliding_window_how_many_candles
+        lo_ma_up = lo_up.rolling(window = rsi_sliding_window_how_many_candles).mean()
+        lo_ma_down = lo_down.rolling(window = rsi_sliding_window_how_many_candles).mean()
 
     lo_rs = lo_ma_up / lo_ma_down
     pd_candles.loc[:,'rsi'] = 100 - (100/(1 + lo_rs))
+    pd_candles['rsi_bucket'] = pd_candles['rsi'].apply(lambda x: bucketize_val(x, buckets=BUCKETS_m0_100))
     pd_candles['ema_rsi'] = pd_candles['rsi'].ewm(
-        span=rsi_sliding_window_how_many_candles
+        span=rsi_sliding_window_how_many_candles,
         adjust=False).mean()
 
+    rsi_rolling = pd_candles['rsi'].rolling(window=int(rsi_trend_sliding_window_how_many_candles))
+    pd_candles['rsi_max'] = rsi_rolling.max()
+    pd_candles['rsi_idmax'] = rsi_rolling.apply(lambda x : x.idxmax())
+    pd_candles['rsi_min'] = rsi_rolling.min()
+    pd_candles['rsi_idmin'] = rsi_rolling.apply(lambda x : x.idxmin())
+
+    def rsi_trend(
+        row,
+        rsi_upper_threshold : float = 70,
+        rsi_lower_threshold : float = 30):
+        if pd.isna(row['rsi_idmax']) or pd.isna(row['rsi_idmin']):
+            return np.nan
+        if row['rsi_idmax'] > row['rsi_idmin']:
+            return 'down' if row.name > row['rsi_idmax'] and row['rsi'] <= rsi_upper_threshold else 'up'
+        else:
+            return 'up' if row.name > row['rsi_idmin'] and row['rsi'] >= rsi_lower_threshold else 'down'
+
+    pd_candles['rsi_trend'] = pd_candles.apply(lambda row: rsi_trend(row), axis=1)
+
+    pd_candles['rsi_trend_from_highs'] = np.where(
+        pd.isna(pd_candles['rsi_max']),
+        None, # type: ignore
+        pd_candles['rsi_max'].rolling(window=rsi_trend_sliding_window_how_many_candles).apply(trend_from_highs, raw=True)
+    )
+    pd_candles['rsi_trend_from_lows'] = np.where(
+        pd.isna(pd_candles['rsi_min']),
+        None, # type: ignore
+        pd_candles['rsi_min'].rolling(window=rsi_trend_sliding_window_how_many_candles).apply(trend_from_lows, raw=True)
+    )
+
+    def _rsi_divergence(row):
+        trend_from_highs_long_periods = TrendDirection(row['trend_from_highs_long_periods']) if row['trend_from_highs_long_periods'] is not None and not pd.isna(row['trend_from_highs_long_periods']) else None # type: ignore
+        rsi_trend_from_highs = TrendDirection(row['rsi_trend_from_highs']) if row['rsi_trend_from_highs'] is not None and not pd.isna(row['rsi_trend_from_highs']) else None # type: ignore
+
+        if trend_from_highs_long_periods and rsi_trend_from_highs and trend_from_highs_long_periods == TrendDirection.LOWER_HIGHS and rsi_trend_from_highs == TrendDirection.HIGHER_HIGHS:
+            return 'bullish_divergence'
+        elif trend_from_highs_long_periods and rsi_trend_from_highs and trend_from_highs_long_periods == TrendDirection.HIGHER_HIGHS and rsi_trend_from_highs == TrendDirection.LOWER_HIGHS:
+            return 'bearish_divergence'
+        return 'no_divergence'
+    pd_candles['rsi_divergence'] = pd_candles.apply(_rsi_divergence, axis=1)
+
 
     # MFI (Money Flow Index) https://randerson112358.medium.com/algorithmic-trading-strategy-using-money-flow-index-mfi-python-aa46461a5ea5
     pd_candles['typical_price'] = (pd_candles['high'] + pd_candles['low'] + pd_candles['close']) / 3
```
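A self-contained sketch of the RSI recipe used above (editorial addition on toy data): up-moves are isolated with `clip(lower=0)` as in `lo_up`, smoothed with `ewm(com=N-1)` (Wilder-style smoothing), and folded into `100 - 100/(1 + RS)`. The down leg `lo_down` is not visible in this hunk, so the standard counterpart clip is assumed here.

```python
import pandas as pd

close = pd.Series([44.0, 44.5, 44.2, 44.9, 45.4, 45.1, 45.8, 46.2, 45.9, 46.5,
                   46.8, 46.4, 47.0, 47.3, 47.1, 47.6])
n = 14  # rsi_sliding_window_how_many_candles default

delta = close.diff()
gains = delta.clip(lower=0)              # mirrors lo_up
losses = -1 * delta.clip(upper=0)        # assumed counterpart for lo_down

avg_gain = gains.ewm(com=n - 1, adjust=True, min_periods=n).mean()   # Wilder-style EMA
avg_loss = losses.ewm(com=n - 1, adjust=True, min_periods=n).mean()

rs = avg_gain / avg_loss
rsi = 100 - (100 / (1 + rs))
print(rsi.dropna().round(1).tolist())    # RSI values once n observations are available
```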
```diff
@@ -319,59 +581,116 @@ def compute_candles_stats(
         rsi_sliding_window_how_many_candles if rsi_sliding_window_how_many_candles else sliding_window_how_many_candles).sum()
     pd_candles['money_flow_ratio'] = pd_candles['positive_flow_sum'] / pd_candles['negative_flow_sum']
     pd_candles['mfi'] = 100 - (100 / (1 + pd_candles['money_flow_ratio']))
+    pd_candles['mfi_bucket'] = pd_candles['mfi'].apply(lambda x: bucketize_val(x, buckets=BUCKETS_m0_100))
 
 
     # MACD https://www.investopedia.com/terms/m/macd.asp
+    # https://www.youtube.com/watch?v=jmPCL3l08ss
     pd_candles['macd'] = pd_candles['ema_short_periods'] - pd_candles['ema_long_periods']
-    pd_candles['signal'] = pd_candles['macd'].ewm(span=
-    pd_candles['macd_minus_signal'] = pd_candles['macd'] - pd_candles['signal']
+    pd_candles['signal'] = pd_candles['macd'].ewm(span=int(sliding_window_how_many_candles/slow_fast_interval_ratio), adjust=False).mean()
+    pd_candles['macd_minus_signal'] = pd_candles['macd'] - pd_candles['signal'] # MACD histogram
+    macd_cur = pd_candles['macd_minus_signal']
+    macd_prev = pd_candles['macd_minus_signal'].shift(1)
+    bullish_macd_crosses = (macd_prev < 0) & (macd_cur > 0)
+    bearish_macd_crosses = (macd_prev > 0) & (macd_cur < 0)
+    pd_candles.loc[bullish_macd_crosses, 'macd_cross'] = 1
+    pd_candles.loc[bearish_macd_crosses, 'macd_cross'] = -1
+    bullish_indices = pd.Series(pd_candles.index.where(pd_candles['macd_cross'] == 1), index=pd_candles.index).astype('Int64')
+    bearish_indices = pd.Series(pd_candles.index.where(pd_candles['macd_cross'] == -1), index=pd_candles.index).astype('Int64')
+    pd_candles['macd_bullish_cross_last_id'] = bullish_indices.rolling(window=pd_candles.shape[0], min_periods=1).max().astype('Int64')
+    pd_candles['macd_bearish_cross_last_id'] = bearish_indices.rolling(window=pd_candles.shape[0], min_periods=1).max().astype('Int64')
+    conditions = [
+        (pd_candles['macd_bullish_cross_last_id'].notna() &
+         pd_candles['macd_bearish_cross_last_id'].notna() &
+         (pd_candles['macd_bullish_cross_last_id'] > pd_candles['macd_bearish_cross_last_id'])),
+
+        (pd_candles['macd_bullish_cross_last_id'].notna() &
+         pd_candles['macd_bearish_cross_last_id'].notna() &
+         (pd_candles['macd_bearish_cross_last_id'] > pd_candles['macd_bullish_cross_last_id'])),
+
+        (pd_candles['macd_bullish_cross_last_id'].notna() &
+         pd_candles['macd_bearish_cross_last_id'].isna()),
+
+        (pd_candles['macd_bearish_cross_last_id'].notna() &
+         pd_candles['macd_bullish_cross_last_id'].isna())
+    ]
+    choices = ['bullish', 'bearish', 'bullish', 'bearish']
+    pd_candles['macd_cross_last'] = np.select(conditions, choices, default=None) # type: ignore
+    pd_candles.loc[bullish_macd_crosses, 'macd_cross'] = 'bullish'
+    pd_candles.loc[bearish_macd_crosses, 'macd_cross'] = 'bearish'
 
     if not pypy_compat:
-
-
-
-
-
-
+        calculate_slope(
+            pd_data=pd_candles,
+            src_col_name='close',
+            slope_col_name='close_short_slope',
+            sliding_window_how_many_candles=int(sliding_window_how_many_candles/slow_fast_interval_ratio)
+        )
 
-
-
-
+        calculate_slope(
+            pd_data=pd_candles,
+            src_col_name='close',
+            slope_col_name='close_long_slope',
+            sliding_window_how_many_candles=int(sliding_window_how_many_candles)
+        )
 
-
-
-
+        calculate_slope(
+            pd_data=pd_candles,
+            src_col_name='ema_short_periods',
+            slope_col_name='ema_short_slope',
+            sliding_window_how_many_candles=int(sliding_window_how_many_candles/slow_fast_interval_ratio)
+        )
 
-
-
-
+        calculate_slope(
+            pd_data=pd_candles,
+            src_col_name='ema_long_periods',
+            slope_col_name='ema_long_slope',
+            sliding_window_how_many_candles=int(sliding_window_how_many_candles)
+        )
 
-
-
-
+        calculate_slope(
+            pd_data=pd_candles,
+            src_col_name='boillenger_upper',
+            slope_col_name='boillenger_upper_slope',
+            sliding_window_how_many_candles=int(sliding_window_how_many_candles)
+        )
 
-
-
-
+        calculate_slope(
+            pd_data=pd_candles,
+            src_col_name='boillenger_lower',
+            slope_col_name='boillenger_lower_slope',
+            sliding_window_how_many_candles=int(sliding_window_how_many_candles)
+        )
 
-
-
-
+        calculate_slope(
+            pd_data=pd_candles,
+            src_col_name='ema_rsi',
+            slope_col_name='ema_rsi_slope',
+            sliding_window_how_many_candles=int(rsi_trend_sliding_window_how_many_candles)
+        )
 
         pd_candles['regular_divergence'] = (
             (pd_candles['ema_long_slope'] > 0) & (pd_candles['ema_rsi_slope'] < 0) |
             (pd_candles['ema_long_slope'] < 0) & (pd_candles['ema_rsi_slope'] > 0)
         )
+
+        calculate_slope(
+            pd_data=pd_candles,
+            src_col_name='hurst_exp',
+            slope_col_name='hurst_exp_slope',
+            sliding_window_how_many_candles=hurst_exp_window_how_many_candles
+        )
 
 
     # Fibonacci
-
-    pd_candles['
-    pd_candles['fib_618_long_periods'] = pd_candles.apply(lambda rw : estimate_fib_retracement(rw['min_long_periods'], rw['idmin_long_periods'], rw['max_long_periods'], rw['idmax_long_periods'], TARGET_FIB_LEVEL), axis=1)
+    pd_candles[f'fib_{target_fib_level}_short_periods'] = pd_candles.apply(lambda rw : estimate_fib_retracement(rw['min_short_periods'], rw['idmin_short_periods'], rw['max_short_periods'], rw['idmax_short_periods'], target_fib_level), axis=1)
+    pd_candles[f'fib_{target_fib_level}_long_periods'] = pd_candles.apply(lambda rw : estimate_fib_retracement(rw['min_long_periods'], rw['idmin_long_periods'], rw['max_long_periods'], rw['idmax_long_periods'], target_fib_level), axis=1)
 
 
     # Inflection points
     pd_candles['gap_close_vs_ema'] = pd_candles['close'] - pd_candles['ema_long_periods']
+    pd_candles['gap_close_vs_ema_percent'] = pd_candles['gap_close_vs_ema']/pd_candles['close'] *100
+
     pd_candles['close_above_or_below_ema'] = None
     pd_candles.loc[pd_candles['gap_close_vs_ema'] > 0, 'close_above_or_below_ema'] = 'above'
    pd_candles.loc[pd_candles['gap_close_vs_ema'] < 0, 'close_above_or_below_ema'] = 'below'
```
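The MACD cross columns added above key off sign flips of the histogram (`macd - signal`). A compact sketch of that detection on toy numbers (editorial addition, not package code):

```python
import pandas as pd

hist = pd.Series([-0.4, -0.1, 0.2, 0.5, 0.1, -0.3])  # macd - signal, i.e. the histogram
prev = hist.shift(1)

bullish = (prev < 0) & (hist > 0)   # histogram flips from negative to positive
bearish = (prev > 0) & (hist < 0)   # histogram flips from positive to negative

print(bullish[bullish].index.tolist())  # [2]
print(bearish[bearish].index.tolist())  # [5]
```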
```diff
@@ -381,6 +700,68 @@ def compute_candles_stats(
         'close_vs_ema_inflection'
     ] = np.sign(pd_candles['close'] - pd_candles['ema_long_periods'])
 
+def lookup_fib_target(
+    row,
+    pd_candles,
+    target_fib_level : float = 0.618
+) -> Union[Dict, None]:
+    if row is None:
+        return None
+
+    fib_target_short_periods = None
+    fib_target_long_periods = None
+
+    max_short_periods = row['max_short_periods']
+    idmax_short_periods = int(row['idmax_short_periods']) if not math.isnan(row['idmax_short_periods']) else None
+    max_long_periods = row['max_long_periods']
+    idmax_long_periods = int(row['idmax_long_periods']) if not math.isnan(row['idmax_long_periods']) else None
+
+    min_short_periods = row['min_short_periods']
+    idmin_short_periods = int(row['idmin_short_periods']) if not math.isnan(row['idmin_short_periods']) else None
+    min_long_periods = row['min_long_periods']
+    idmin_long_periods = int(row['idmin_long_periods']) if not math.isnan(row['idmin_long_periods']) else None
+
+    if idmax_short_periods and idmin_short_periods and idmax_short_periods>0 and idmin_short_periods>0:
+        if idmax_short_periods>idmin_short_periods and idmax_short_periods < len(pd_candles):
+            # Falling from prev peak
+            last_peak = pd_candles.iloc[idmax_short_periods]
+            fib_target_short_periods = last_peak[f'fib_{target_fib_level}_short_periods'] if not math.isnan(last_peak[f'fib_{target_fib_level}_short_periods']) else None
+
+        else:
+            # Bouncing from prev bottom
+            if idmin_short_periods < len(pd_candles):
+                last_bottom = pd_candles.iloc[idmin_short_periods]
+                fib_target_short_periods = last_bottom[f'fib_{target_fib_level}_short_periods'] if not math.isnan(last_bottom[f'fib_{target_fib_level}_short_periods']) else None
+
+    if idmax_long_periods and idmin_long_periods and idmax_long_periods>0 and idmin_long_periods>0:
+        if idmax_long_periods>idmin_long_periods and idmax_long_periods < len(pd_candles):
+            # Falling from prev peak
+            last_peak = pd_candles.iloc[idmax_long_periods]
+            fib_target_long_periods = last_peak[f'fib_{target_fib_level}_long_periods'] if not math.isnan(last_peak[f'fib_{target_fib_level}_long_periods']) else None
+
+        else:
+            # Bouncing from prev bottom
+            if idmin_long_periods < len(pd_candles):
+                last_bottom = pd_candles.iloc[idmin_long_periods]
+                fib_target_long_periods = last_bottom[f'fib_{target_fib_level}_long_periods'] if not math.isnan(last_bottom[f'fib_{target_fib_level}_long_periods']) else None
+
+    return {
+        'short_periods' : {
+            'idmin' : idmin_short_periods,
+            'idmax' : idmax_short_periods,
+            'min' : min_short_periods,
+            'max' : max_short_periods,
+            'fib_target' : fib_target_short_periods,
+        },
+        'long_periods' : {
+            'idmin' : idmin_long_periods,
+            'idmax' : idmax_long_periods,
+            'min' : min_long_periods,
+            'max' : max_long_periods,
+            'fib_target' : fib_target_long_periods
+        }
+    }
+
 '''
 The implementation from Geeksforgeeks https://www.geeksforgeeks.org/find-indices-of-all-local-maxima-and-local-minima-in-an-array/ is wrong.
 If you have consecutive-duplicates, things will gall apart!
```
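The `fib_{level}` columns and `lookup_fib_target` build on Fibonacci retracement levels. A worked example of the textbook retracement arithmetic (editorial addition; `estimate_fib_retracement`'s body is not shown in this diff, so this is the standard formula rather than a guaranteed match to the package implementation):

```python
# Standard Fibonacci retracement for a completed up-swing from swing_low to swing_high.
swing_low, swing_high = 90.0, 110.0
target_fib_level = 0.618  # default target_fib_level in the new parameters

retracement_price = swing_high - (swing_high - swing_low) * target_fib_level
print(retracement_price)  # 97.64 -> the 61.8% pullback level of the 90-110 move
```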
```diff
@@ -790,3 +1171,40 @@ def partition_sliding_window(
         'maxima' : maxima,
         'segments' : consolidated_segements
     }
+
+# This relies on statsmodels.api, which is not pypy compatible
+def compute_pair_stats(
+    pd_candles : pd.DataFrame,
+    how_many_candles : int = 24*7
+) -> None:
+    import statsmodels.api as sm
+
+    def _compute_hedge_ratio(
+        prices0 : List[float],
+        prices1 : List[float]
+    ):
+        model = sm.OLS(prices0, prices1).fit()
+        hedge_ratio = model.params[0]
+        return hedge_ratio
+
+    pd_candles['hedge_ratio'] = np.nan
+    for j in range(how_many_candles, pd_candles.shape[0]):
+        window = pd_candles.iloc[j-how_many_candles:j]
+        hedge_ratio = _compute_hedge_ratio(window['close_1'].values, window['close_2'].values) # type: ignore
+        pd_candles.loc[j, 'hedge_ratio'] = hedge_ratio
+
+    pd_candles['close_spread'] = pd_candles['close_1'] - (pd_candles['close_2'] * pd_candles['hedge_ratio']) # You're fitting one hedge_ratio over a windows
+    mean = pd_candles['close_spread'].rolling(how_many_candles).mean()
+    std = pd_candles['close_spread'].rolling(how_many_candles).std()
+    pd_candles['close_spread_mean'] = mean
+    pd_candles['close_spread_std'] = std
+    pd_candles['zscore_close_spread'] = (pd_candles['close_spread'] - mean)/std
+    pd_candles['zscore_close_spread_min'] = pd_candles['zscore_close_spread'].rolling(how_many_candles).min()
+    pd_candles['zscore_close_spread_max'] = pd_candles['zscore_close_spread'].rolling(how_many_candles).max()
+
+    calculate_slope(
+        pd_data=pd_candles,
+        src_col_name='zscore_close_spread',
+        slope_col_name='zscore_slope',
+        sliding_window_how_many_candles=how_many_candles
+    )
```
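The new `compute_pair_stats` fits a rolling OLS hedge ratio between two close series and z-scores the resulting spread. A self-contained sketch of that core step on synthetic data (editorial addition; the column names `close_1`/`close_2` and the intercept-free OLS follow the diff, the data and seed are made up):

```python
import numpy as np
import pandas as pd
import statsmodels.api as sm

rng = np.random.default_rng(7)
n, window = 400, 168  # 168 = 24*7, the default how_many_candles in the diff

close_2 = pd.Series(100 + np.cumsum(rng.normal(0, 1, n)))
close_1 = 1.5 * close_2 + rng.normal(0, 2, n)   # roughly cointegrated toy pair

# Hedge ratio over the trailing window, as _compute_hedge_ratio does (OLS without intercept).
beta = sm.OLS(close_1.iloc[-window:].values, close_2.iloc[-window:].values).fit().params[0]

spread = close_1 - beta * close_2
zscore = (spread - spread.rolling(window).mean()) / spread.rolling(window).std()
print(round(beta, 3), round(zscore.iloc[-1], 2))  # hedge ratio near 1.5 and the latest spread z-score
```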