siglab-py 0.1.29__py3-none-any.whl → 0.6.12__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of siglab-py might be problematic. Click here for more details.
- siglab_py/constants.py +26 -1
- siglab_py/exchanges/binance.py +38 -0
- siglab_py/exchanges/deribit.py +83 -0
- siglab_py/exchanges/futubull.py +12 -2
- siglab_py/market_data_providers/candles_provider.py +2 -2
- siglab_py/market_data_providers/candles_ta_provider.py +3 -3
- siglab_py/market_data_providers/ccxt_candles_ta_to_csv.py +4 -4
- siglab_py/market_data_providers/futu_candles_ta_to_csv.py +7 -2
- siglab_py/market_data_providers/google_monitor.py +320 -0
- siglab_py/market_data_providers/orderbooks_provider.py +15 -12
- siglab_py/market_data_providers/tg_monitor.py +428 -0
- siglab_py/market_data_providers/{test_provider.py → trigger_provider.py} +9 -8
- siglab_py/ordergateway/client.py +172 -41
- siglab_py/ordergateway/encrypt_keys_util.py +1 -1
- siglab_py/ordergateway/gateway.py +456 -347
- siglab_py/ordergateway/test_ordergateway.py +8 -7
- siglab_py/tests/integration/market_data_util_tests.py +35 -1
- siglab_py/tests/unit/analytic_util_tests.py +47 -12
- siglab_py/tests/unit/simple_math_tests.py +235 -0
- siglab_py/tests/unit/trading_util_tests.py +65 -0
- siglab_py/util/analytic_util.py +478 -69
- siglab_py/util/market_data_util.py +487 -100
- siglab_py/util/notification_util.py +78 -0
- siglab_py/util/retry_util.py +11 -3
- siglab_py/util/simple_math.py +240 -0
- siglab_py/util/slack_notification_util.py +59 -0
- siglab_py/util/trading_util.py +118 -0
- {siglab_py-0.1.29.dist-info → siglab_py-0.6.12.dist-info}/METADATA +5 -9
- siglab_py-0.6.12.dist-info/RECORD +44 -0
- {siglab_py-0.1.29.dist-info → siglab_py-0.6.12.dist-info}/WHEEL +1 -1
- siglab_py-0.1.29.dist-info/RECORD +0 -34
- {siglab_py-0.1.29.dist-info → siglab_py-0.6.12.dist-info}/top_level.txt +0 -0
siglab_py/util/analytic_util.py
CHANGED
@@ -1,6 +1,7 @@
 import tzlocal
 from datetime import datetime, timezone
 from typing import List, Dict, Union, NoReturn, Any, Tuple
+from enum import Enum
 from pathlib import Path
 import math
 import pandas as pd
@@ -10,7 +11,47 @@ from hurst import compute_Hc # compatible with pypy
 from ccxt.base.exchange import Exchange as CcxtExchange
 from ccxt import deribit
 
+from siglab_py.util.simple_math import bucket_series, bucketize_val
 from siglab_py.util.market_data_util import fix_column_types
+from siglab_py.constants import TrendDirection
+
+def classify_candle(
+    candle : pd.Series,
+    min_candle_height_ratio : float = 5,
+    distance_from_mid_doji_threshold_bps : float = 10
+) -> Union[str, None]:
+    candle_class : Union[str, None] = None
+    open = candle['open']
+    high = candle['high']
+    low = candle['low']
+    close = candle['close']
+    candle_full_height = high - low # always positive
+    candle_body_height = close - open # can be negative
+    candle_full_mid = (high + low)/2
+    candle_body_mid = (open + close)/2
+    distance_from_mid_bps = (candle_full_mid/candle_body_mid -1)*10000 if candle_full_mid>candle_body_mid else (candle_body_mid/candle_full_mid -1)*10000
+
+    candle_height_ratio = candle_full_height / abs(candle_body_height) if candle_body_height!=0 else float('inf')
+
+    if (
+        candle_height_ratio>=min_candle_height_ratio
+        and close>low
+    ):
+        candle_class = 'hammer'
+    elif (
+        candle_height_ratio>=min_candle_height_ratio
+        and close<high
+    ):
+        candle_class = 'shooting_star'
+    elif(
+        candle_height_ratio>=min_candle_height_ratio
+        and distance_from_mid_bps<=distance_from_mid_doji_threshold_bps
+    ):
+        candle_class = 'doji'
+
+    # Keep add more ...
+
+    return candle_class
 
 # Fibonacci
 MAGIC_FIB_LEVELS = [0, 0.236, 0.382, 0.5, 0.618, 0.786, 1.00, 1.618, 2.618, 3.618, 4.236]
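
For orientation, the new classify_candle operates on a single OHLC row. A minimal usage sketch, assuming the module imports as siglab_py.util.analytic_util and using made-up prices:

    import pandas as pd
    from siglab_py.util.analytic_util import classify_candle

    candle = pd.Series({'open': 100.0, 'high': 101.0, 'low': 99.0, 'close': 100.02})
    # full range 2.0 vs body 0.02 gives a height ratio of 100 >= 5, and close > low,
    # so the first branch matches and this returns 'hammer'
    print(classify_candle(candle))
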
@@ -32,17 +73,64 @@ def estimate_fib_retracement(
 
     return retracement_price
 
+def calculate_slope(
+    pd_data : pd.DataFrame,
+    src_col_name : str,
+    slope_col_name : str,
+    sliding_window_how_many_candles : int
+):
+    import statsmodels.api as sm # in-compatible with pypy
+
+    X = sm.add_constant(range(len(pd_data[src_col_name])))
+    rolling_slope = pd_data[src_col_name].rolling(window=sliding_window_how_many_candles).apply(lambda x: sm.OLS(x, X[:len(x)]).fit().params[1], raw=False)
+    pd_data[slope_col_name] = rolling_slope
+    max_abs_slope = pd_data[slope_col_name].abs().rolling(window=sliding_window_how_many_candles).max()
+    pd_data[f"normalized_{slope_col_name}"] = pd_data[slope_col_name] / max_abs_slope
+    normalized_slope_rolling = pd_data[f"normalized_{slope_col_name}"].rolling(window=sliding_window_how_many_candles)
+    pd_data[f"normalized_{slope_col_name}_min"] = normalized_slope_rolling.min()
+    pd_data[f"normalized_{slope_col_name}_max"] = normalized_slope_rolling.max()
+    pd_data[f"normalized_{slope_col_name}_idmin"] = normalized_slope_rolling.apply(lambda x : x.idxmin())
+    pd_data[f"normalized_{slope_col_name}_idmax"] = normalized_slope_rolling.apply(lambda x : x.idxmax())
+
+def trend_from_highs(series: np.ndarray) -> float:
+    valid_series = series[~np.isnan(series)]
+    unique_maxima = valid_series[np.concatenate(([True], np.diff(valid_series) != 0))]
+    if len(unique_maxima) < 2:
+        return TrendDirection.UNDEFINED.value
+    first, last = unique_maxima[0], unique_maxima[-1]
+    if first > last:
+        return TrendDirection.LOWER_HIGHS.value
+    elif first < last:
+        return TrendDirection.HIGHER_HIGHS.value
+    else:
+        return TrendDirection.SIDEWAYS.value
+
+def trend_from_lows(series: np.ndarray) -> float:
+    valid_series = series[~np.isnan(series)]
+    unique_minima = valid_series[np.concatenate(([True], np.diff(valid_series) != 0))]
+    if len(unique_minima) < 2:
+        return TrendDirection.UNDEFINED.value
+    first, last = unique_minima[0], unique_minima[-1]
+    if first > last:
+        return TrendDirection.LOWER_LOWS.value
+    elif first < last:
+        return TrendDirection.HIGHER_LOWS.value
+    else:
+        return TrendDirection.SIDEWAYS.value
+
+
 '''
 compute_candles_stats will calculate typical/basic technical indicators using in many trading strategies:
     a. Basic SMA/EMAs (And slopes)
-    b.
-    c.
-    d.
-    e.
-    f.
-    g.
-    h.
-    i.
+    b. EMA crosses
+    c. ATR
+    d. Boillenger bands (Yes incorrect spelling sorry)
+    e. FVG
+    f. Hurst Exponent
+    g. RSI, MFI
+    h. MACD
+    i. Fibonacci
+    j. Inflections points: where 'close' crosses EMA from above or below.
 
 Parameters:
     a. boillenger_std_multiples: For boillenger upper and lower calc
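
The new trend helpers reduce a rolling max/min series to a direction by comparing its first and last distinct values. A small sketch, assuming the same import path as above and using made-up data:

    import numpy as np
    from siglab_py.util.analytic_util import trend_from_highs

    highs = np.array([np.nan, 10.0, 10.0, 9.5, 9.0])
    # distinct values are [10.0, 9.5, 9.0]; first (10.0) > last (9.0),
    # so this returns TrendDirection.LOWER_HIGHS.value
    print(trend_from_highs(highs))
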
@@ -64,12 +152,21 @@ def compute_candles_stats(
     rsi_ema : bool = True,
     boillenger_ema : bool = False,
     slow_fast_interval_ratio : float = 3,
-    rsi_sliding_window_how_many_candles :
+    rsi_sliding_window_how_many_candles : int = 14, # RSI standard 14
+    rsi_trend_sliding_window_how_many_candles : int = 24*7, # This is for purpose of RSI trend identification (Locating local peaks/troughs in RSI). This should typically be multiples of 'rsi_sliding_window_how_many_candles'.
     hurst_exp_window_how_many_candles : Union[int, None] = None, # Hurst exp standard 100-200
     boillenger_std_multiples_for_aggressive_moves_detect : int = 3, # Aggressive moves if candle low/high breaches boillenger bands from 3 standard deviations.
+    target_fib_level : float = 0.618,
     pypy_compat : bool = True
 ):
+    BUCKETS_m0_100 = bucket_series(
+        values=list([i for i in range(0,100)]),
+        outlier_threshold_percent=10,
+        level_granularity=0.1
+    )
+
     pd_candles['candle_height'] = pd_candles['high'] - pd_candles['low']
+    pd_candles['candle_body_height'] = pd_candles['close'] - pd_candles['open']
 
     '''
     market_data_gizmo inserted dummy lines --> Need exclude those or "TypeError: unorderable types for comparison": pd_btc_candles = pd_btc_candles[pd_btc_candles.close.notnull()]
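
Putting the new parameters together, a hedged call sketch. The keyword names are read off this diff; pd_candles, sliding_window_how_many_candles and boillenger_std_multiples appear in the function body and docstring but not in the visible part of the signature, so treat those keywords as assumptions:

    compute_candles_stats(
        pd_candles=pd_candles,                             # OHLCV candles with a 'timestamp_ms' column (assumed keyword)
        boillenger_std_multiples=2,                        # assumed keyword, per the docstring's parameter list
        sliding_window_how_many_candles=24,                # assumed keyword; short window = 24/slow_fast_interval_ratio
        rsi_sliding_window_how_many_candles=14,            # RSI standard 14
        rsi_trend_sliding_window_how_many_candles=24*7,    # RSI peak/trough detection window
        target_fib_level=0.618,                            # drives the fib_0.618_* columns
        pypy_compat=True                                   # skip the statsmodels-based slope columns
    )
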
@@ -86,36 +183,145 @@ def compute_candles_stats(
 
     pd_candles['is_green'] = pd_candles['close'] >= pd_candles['open']
 
+    pd_candles['candle_class'] = pd_candles.apply(lambda row: classify_candle(row), axis=1) # type: ignore
+
+    close_short_periods_rolling = pd_candles['close'].rolling(window=int(sliding_window_how_many_candles/slow_fast_interval_ratio))
+    close_long_periods_rolling = pd_candles['close'].rolling(window=sliding_window_how_many_candles)
+    close_short_periods_ewm = pd_candles['close'].ewm(span=int(sliding_window_how_many_candles/slow_fast_interval_ratio), adjust=False)
+    close_long_periods_ewm = pd_candles['close'].ewm(span=sliding_window_how_many_candles, adjust=False)
+
     pd_candles['pct_change_close'] = pd_candles['close'].pct_change() * 100
-    pd_candles['sma_short_periods'] =
-    pd_candles['sma_long_periods'] =
-    pd_candles['ema_short_periods'] =
-    pd_candles['ema_long_periods'] =
+    pd_candles['sma_short_periods'] = close_short_periods_rolling.mean()
+    pd_candles['sma_long_periods'] = close_long_periods_rolling.mean()
+    pd_candles['ema_short_periods'] = close_short_periods_ewm.mean()
+    pd_candles['ema_long_periods'] = close_long_periods_ewm.mean()
     pd_candles['ema_close'] = pd_candles['ema_long_periods'] # Alias, shorter name
-    pd_candles['std'] =
-
+    pd_candles['std'] = close_long_periods_rolling.std()
     pd_candles['std_percent'] = pd_candles['std'] / pd_candles['ema_close'] * 100
+
+    pd_candles['vwap_short_periods'] = (pd_candles['close'] * pd_candles['volume']).rolling(window=int(sliding_window_how_many_candles/slow_fast_interval_ratio)).sum() / pd_candles['volume'].rolling(window=int(sliding_window_how_many_candles/slow_fast_interval_ratio)).sum()
+    pd_candles['vwap_long_periods'] = (pd_candles['close'] * pd_candles['volume']).rolling(window=sliding_window_how_many_candles).sum() / pd_candles['volume'].rolling(window=sliding_window_how_many_candles).sum()
+
     pd_candles['candle_height_percent'] = pd_candles['candle_height'] / pd_candles['ema_close'] * 100
-    pd_candles['candle_height_percent_rounded'] = pd_candles['candle_height_percent'].round().astype(
+    pd_candles['candle_height_percent_rounded'] = pd_candles['candle_height_percent'].round().astype('Int64')
+
+    pd_candles['candle_body_height_percent'] = pd_candles['candle_body_height'] / pd_candles['ema_close'] * 100
+    pd_candles['candle_body_height_percent_rounded'] = pd_candles['candle_body_height_percent'].round().astype('Int64')
+
+    '''
+    To annualize volatility:
+        if candle_interval == '1m':
+            annualization_factor = np.sqrt(365 * 24 * 60) # 1-minute candles
+        elif candle_interval == '1h':
+            annualization_factor = np.sqrt(365 * 24) # 1-hour candles
+        elif candle_interval == '1d':
+            annualization_factor = np.sqrt(365) # 1-day candles
+        pd_candles['annualized_volatility'] = (
+            pd_candles['interval_historical_volatility'] * annualization_factor
+        )
+
+    Why log return? Trading Dude https://python.plainenglish.io/stop-using-percentage-returns-logarithmic-returns-explained-with-code-64a4634b883a
+    '''
+    pd_candles['log_return'] = np.log(pd_candles['close'] / pd_candles['close'].shift(1))
+    pd_candles['interval_hist_vol'] = pd_candles['log_return'].rolling(window=sliding_window_how_many_candles).std()
+
+    time_gap_sec = int(pd_candles['timestamp_ms'].iloc[1] - pd_candles['timestamp_ms'].iloc[0])/1000
+    seconds_in_year = 365 * 24 * 60 * 60
+    candles_per_year = seconds_in_year / time_gap_sec
+    annualization_factor = np.sqrt(candles_per_year)
+    pd_candles['annualized_hist_vol'] = pd_candles['interval_hist_vol'] * annualization_factor
 
     pd_candles['chop_against_ema'] = (
-        (pd_candles['is_green'] & (pd_candles['close'] > pd_candles['ema_close'])) | # Case 1: Green candle and close > EMA
-        (
+        (~pd_candles['is_green'] & (pd_candles['close'] > pd_candles['ema_close'])) | # Case 1: Green candle and close > EMA
+        (pd_candles['is_green'] & (pd_candles['close'] < pd_candles['ema_close'])) # Case 2: Red candle and close < EMA
     )
 
     pd_candles['ema_volume_short_periods'] = pd_candles['volume'].ewm(span=sliding_window_how_many_candles/slow_fast_interval_ratio, adjust=False).mean()
     pd_candles['ema_volume_long_periods'] = pd_candles['volume'].ewm(span=sliding_window_how_many_candles, adjust=False).mean()
 
-    pd_candles['
-    pd_candles['
-    pd_candles['
-    pd_candles['
+    pd_candles['ema_cross'] = None
+    pd_candles['ema_cross_last'] = None
+    pd_candles['ema_bullish_cross_last_id'] = None
+    pd_candles['ema_bearish_cross_last_id'] = None
+    ema_short_periods_prev = pd_candles['ema_short_periods'].shift(1)
+    ema_long_periods_prev = pd_candles['ema_long_periods'].shift(1)
+    ema_short_periods_curr = pd_candles['ema_short_periods']
+    ema_long_periods_curr = pd_candles['ema_long_periods']
+    bullish_ema_crosses = (ema_short_periods_prev <= ema_long_periods_prev) & (ema_short_periods_curr > ema_long_periods_curr)
+    bearish_ema_crosses = (ema_short_periods_prev >= ema_long_periods_prev) & (ema_short_periods_curr < ema_long_periods_curr)
+    pd_candles.loc[bullish_ema_crosses, 'ema_cross'] = 1
+    pd_candles.loc[bearish_ema_crosses, 'ema_cross'] = -1
+    bullish_indices = pd.Series(pd_candles.index.where(pd_candles['ema_cross'] == 1), index=pd_candles.index).astype('Int64')
+    bearish_indices = pd.Series(pd_candles.index.where(pd_candles['ema_cross'] == -1), index=pd_candles.index).astype('Int64')
+    pd_candles['ema_bullish_cross_last_id'] = bullish_indices.rolling(window=pd_candles.shape[0], min_periods=1).max().astype('Int64')
+    pd_candles['ema_bearish_cross_last_id'] = bearish_indices.rolling(window=pd_candles.shape[0], min_periods=1).max().astype('Int64')
+    conditions = [
+        (pd_candles['ema_bullish_cross_last_id'].notna() &
+         pd_candles['ema_bearish_cross_last_id'].notna() &
+         (pd_candles['ema_bullish_cross_last_id'] > pd_candles['ema_bearish_cross_last_id'])),
+
+        (pd_candles['ema_bullish_cross_last_id'].notna() &
+         pd_candles['ema_bearish_cross_last_id'].notna() &
+         (pd_candles['ema_bearish_cross_last_id'] > pd_candles['ema_bullish_cross_last_id'])),
+
+        (pd_candles['ema_bullish_cross_last_id'].notna() &
+         pd_candles['ema_bearish_cross_last_id'].isna()),
+
+        (pd_candles['ema_bearish_cross_last_id'].notna() &
+         pd_candles['ema_bullish_cross_last_id'].isna())
+    ]
+    choices = ['bullish', 'bearish', 'bullish', 'bearish']
+    pd_candles['ema_cross_last'] = np.select(conditions, choices, default=None) # type: ignore
+    pd_candles.loc[bullish_ema_crosses, 'ema_cross'] = 'bullish'
+    pd_candles.loc[bearish_ema_crosses, 'ema_cross'] = 'bearish'
+
+    pd_candles['max_short_periods'] = close_short_periods_rolling.max()
+    pd_candles['max_long_periods'] = close_long_periods_rolling.max()
+    pd_candles['idmax_short_periods'] = close_short_periods_rolling.apply(lambda x : x.idxmax())
+    pd_candles['idmax_long_periods'] = close_long_periods_rolling.apply(lambda x : x.idxmax())
+
+    pd_candles['min_short_periods'] = close_short_periods_rolling.min()
+    pd_candles['min_long_periods'] = close_long_periods_rolling.min()
+    pd_candles['idmin_short_periods'] = close_short_periods_rolling.apply(lambda x : x.idxmin())
+    pd_candles['idmin_long_periods'] = close_long_periods_rolling.apply(lambda x : x.idxmin())
+
+    pd_candles['max_candle_body_height_percent_long_periods'] = pd_candles['candle_body_height_percent'].rolling(window=sliding_window_how_many_candles).max()
+    pd_candles['idmax_candle_body_height_percent_long_periods'] = pd_candles['candle_body_height_percent'].rolling(window=sliding_window_how_many_candles).apply(lambda x : x.idxmax())
+    pd_candles['min_candle_body_height_percent_long_periods'] = pd_candles['candle_body_height_percent'].rolling(window=sliding_window_how_many_candles).min()
+    pd_candles['idmin_candle_body_height_percent_long_periods'] = pd_candles['candle_body_height_percent'].rolling(window=sliding_window_how_many_candles).apply(lambda x : x.idxmin())
+
+    pd_candles['price_swing_short_periods'] = np.where(
+        pd_candles['idmax_short_periods'] > pd_candles['idmin_short_periods'],
+        pd_candles['max_short_periods'] - pd_candles['min_short_periods'], # Up swing
+        pd_candles['min_short_periods'] - pd_candles['max_short_periods'] # Down swing (negative)
+    )
 
-    pd_candles['
-
-
-
+    pd_candles['price_swing_long_periods'] = np.where(
+        pd_candles['idmax_long_periods'] > pd_candles['idmin_long_periods'],
+        pd_candles['max_long_periods'] - pd_candles['min_long_periods'], # Up swing
+        pd_candles['min_long_periods'] - pd_candles['max_long_periods'] # Down swing (negative)
+    )
 
+    pd_candles['trend_from_highs_long_periods'] = np.where(
+        pd.isna(pd_candles['max_long_periods']),
+        None, # type: ignore
+        pd_candles['max_long_periods'].rolling(window=sliding_window_how_many_candles).apply(trend_from_highs, raw=True)
+    )
+    pd_candles['trend_from_lows_long_periods'] = np.where(
+        pd.isna(pd_candles['min_long_periods']),
+        None, # type: ignore
+        pd_candles['min_long_periods'].rolling(window=sliding_window_how_many_candles).apply(trend_from_lows, raw=True)
+    )
+    pd_candles['trend_from_highs_short_periods'] = np.where(
+        pd.isna(pd_candles['max_short_periods']),
+        None, # type: ignore
+        pd_candles['max_short_periods'].rolling(window=int(sliding_window_how_many_candles/slow_fast_interval_ratio)).apply(trend_from_highs, raw=True)
+    )
+    pd_candles['trend_from_lows_short_periods'] = np.where(
+        pd.isna(pd_candles['min_short_periods']),
+        None, # type: ignore
+        pd_candles['min_short_periods'].rolling(window=int(sliding_window_how_many_candles/slow_fast_interval_ratio)).apply(trend_from_lows, raw=True)
+    )
 
     # ATR https://medium.com/codex/detecting-ranging-and-trending-markets-with-choppiness-index-in-python-1942e6450b58
     pd_candles.loc[:,'h_l'] = pd_candles['high'] - pd_candles['low']
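
The docstring above motivates the annualization factor; the code then derives it from the candle spacing instead of hard-coding the interval. A worked example, assuming hourly candles:

    import numpy as np

    time_gap_sec = 3600                                   # consecutive hourly candles are 3,600,000 ms apart
    candles_per_year = 365 * 24 * 60 * 60 / time_gap_sec  # 8760 hourly candles per year
    annualization_factor = np.sqrt(candles_per_year)      # roughly 93.6
    # annualized_hist_vol = interval_hist_vol * annualization_factor
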
@@ -123,7 +329,9 @@ def compute_candles_stats(
     pd_candles.loc[:,'l_pc'] = abs(pd_candles['low'] - pd_candles['close'].shift(1))
     pd_candles.loc[:,'tr'] = pd_candles[['h_l', 'h_pc', 'l_pc']].max(axis=1)
     pd_candles.loc[:,'atr'] = pd_candles['tr'].rolling(window=sliding_window_how_many_candles).mean()
-
+    pd_candles.loc[:,'atr_avg_short_periods'] = pd_candles['atr'].rolling(window=int(sliding_window_how_many_candles/slow_fast_interval_ratio)).mean()
+    pd_candles.loc[:,'atr_avg_long_periods'] = pd_candles['atr'].rolling(window=sliding_window_how_many_candles).mean()
+
 
     '''
     @hardcode @todo
@@ -134,10 +342,10 @@ def compute_candles_stats(
     Sometimes you may encounter "Exception has occurred: FloatingPointError invalid value encountered in scalar divide"
     And for example adjusting window size from 120 to 125 will resolve the issue.
     '''
+    if not hurst_exp_window_how_many_candles:
+        hurst_exp_window_how_many_candles = (sliding_window_how_many_candles if sliding_window_how_many_candles>=125 else 125)
     pd_candles['hurst_exp'] = pd_candles['close'].rolling(
-        window=
-            hurst_exp_window_how_many_candles if hurst_exp_window_how_many_candles else (sliding_window_how_many_candles if sliding_window_how_many_candles>=125 else 125)
-        )
+        window=hurst_exp_window_how_many_candles
     ).apply(lambda x: compute_Hc(x, kind='price', simplified=True)[0])
 
 
@@ -167,14 +375,14 @@ def compute_candles_stats(
                 first_breach_index = aggressive_mask.idxmax()
                 candle_high = pd_candles.at[first_breach_index, 'high']
                 candle_low = pd_candles.at[first_breach_index, 'low']
-                candle_height = candle_high - candle_low
+                candle_height = candle_high - candle_low # type: ignore
         else:
             aggressive_mask = window['close'] <= window['boillenger_lower_agg']
             if aggressive_mask.any():
                 first_breach_index = aggressive_mask.idxmax()
                 candle_high = pd_candles.at[first_breach_index, 'high']
                 candle_low = pd_candles.at[first_breach_index, 'low']
-                candle_height = candle_high - candle_low
+                candle_height = candle_high - candle_low # type: ignore
 
         return {
             'aggressive_move': aggressive_mask.any(),
@@ -279,10 +487,13 @@ def compute_candles_stats(
         mitigated = pd_candles.iloc[idx + 1:row.name]['close'].lt(row['fvg_high']).any()
         return mitigated
 
-    pd_candles['fvg_mitigated'] = pd_candles.apply(lambda row: compute_fvg_mitigated(row, pd_candles), axis=1)
-
+    pd_candles['fvg_mitigated'] = pd_candles.apply(lambda row: compute_fvg_mitigated(row, pd_candles), axis=1) # type: ignore
 
-
+    '''
+    RSI
+    Divergences from Bybit Learn https://www.youtube.com/watch?v=G9oUTi-PI18&t=809s
+    RSI Reversals from BK Traders https://www.youtube.com/watch?v=MvkbrHjiQlI
+    '''
     pd_candles.loc[:,'close_delta'] = pd_candles['close'].diff()
     pd_candles.loc[:,'close_delta_percent'] = pd_candles['close'].pct_change()
     lo_up = pd_candles['close_delta'].clip(lower=0)
@@ -293,9 +504,9 @@ def compute_candles_stats(
     if rsi_ema == True:
         # Use exponential moving average
         lo_ma_up = lo_up.ewm(
-            com =
+            com = rsi_sliding_window_how_many_candles -1,
             adjust=True,
-            min_periods = rsi_sliding_window_how_many_candles
+            min_periods = rsi_sliding_window_how_many_candles).mean()
         lo_ma_down = lo_down.ewm(
             com = (rsi_sliding_window_how_many_candles if rsi_sliding_window_how_many_candles else sliding_window_how_many_candles) - 1,
             adjust=True,
@@ -303,15 +514,57 @@ def compute_candles_stats(
 
     else:
         # Use simple moving average
-        lo_ma_up = lo_up.rolling(window = rsi_sliding_window_how_many_candles
-        lo_ma_down = lo_down.rolling(window = rsi_sliding_window_how_many_candles
+        lo_ma_up = lo_up.rolling(window = rsi_sliding_window_how_many_candles).mean()
+        lo_ma_down = lo_down.rolling(window = rsi_sliding_window_how_many_candles).mean()
 
     lo_rs = lo_ma_up / lo_ma_down
     pd_candles.loc[:,'rsi'] = 100 - (100/(1 + lo_rs))
+    pd_candles['rsi_bucket'] = pd_candles['rsi'].apply(lambda x: bucketize_val(x, buckets=BUCKETS_m0_100))
     pd_candles['ema_rsi'] = pd_candles['rsi'].ewm(
-        span=rsi_sliding_window_how_many_candles
+        span=rsi_sliding_window_how_many_candles,
         adjust=False).mean()
 
+    rsi_rolling = pd_candles['rsi'].rolling(window=int(rsi_trend_sliding_window_how_many_candles))
+    pd_candles['rsi_max'] = rsi_rolling.max()
+    pd_candles['rsi_idmax'] = rsi_rolling.apply(lambda x : x.idxmax())
+    pd_candles['rsi_min'] = rsi_rolling.min()
+    pd_candles['rsi_idmin'] = rsi_rolling.apply(lambda x : x.idxmin())
+
+    def rsi_trend(
+        row,
+        rsi_upper_threshold : float = 70,
+        rsi_lower_threshold : float = 30):
+        if pd.isna(row['rsi_idmax']) or pd.isna(row['rsi_idmin']):
+            return np.nan
+        if row['rsi_idmax'] > row['rsi_idmin']:
+            return 'down' if row.name > row['rsi_idmax'] and row['rsi'] <= rsi_upper_threshold else 'up'
+        else:
+            return 'up' if row.name > row['rsi_idmin'] and row['rsi'] >= rsi_lower_threshold else 'down'
+
+    pd_candles['rsi_trend'] = pd_candles.apply(lambda row: rsi_trend(row), axis=1)
+
+    pd_candles['rsi_trend_from_highs'] = np.where(
+        pd.isna(pd_candles['rsi_max']),
+        None, # type: ignore
+        pd_candles['rsi_max'].rolling(window=rsi_trend_sliding_window_how_many_candles).apply(trend_from_highs, raw=True)
+    )
+    pd_candles['rsi_trend_from_lows'] = np.where(
+        pd.isna(pd_candles['rsi_min']),
+        None, # type: ignore
+        pd_candles['rsi_min'].rolling(window=rsi_trend_sliding_window_how_many_candles).apply(trend_from_lows, raw=True)
+    )
+
+    def _rsi_divergence(row):
+        trend_from_highs_long_periods = TrendDirection(row['trend_from_highs_long_periods']) if row['trend_from_highs_long_periods'] is not None and not pd.isna(row['trend_from_highs_long_periods']) else None # type: ignore
+        rsi_trend_from_highs = TrendDirection(row['rsi_trend_from_highs']) if row['rsi_trend_from_highs'] is not None and not pd.isna(row['rsi_trend_from_highs']) else None # type: ignore
+
+        if trend_from_highs_long_periods and rsi_trend_from_highs and trend_from_highs_long_periods == TrendDirection.LOWER_HIGHS and rsi_trend_from_highs == TrendDirection.HIGHER_HIGHS:
+            return 'bullish_divergence'
+        elif trend_from_highs_long_periods and rsi_trend_from_highs and trend_from_highs_long_periods == TrendDirection.HIGHER_HIGHS and rsi_trend_from_highs == TrendDirection.LOWER_HIGHS:
+            return 'bearish_divergence'
+        return 'no_divergence'
+    pd_candles['rsi_divergence'] = pd_candles.apply(_rsi_divergence, axis=1)
+
 
     # MFI (Money Flow Index) https://randerson112358.medium.com/algorithmic-trading-strategy-using-money-flow-index-mfi-python-aa46461a5ea5
     pd_candles['typical_price'] = (pd_candles['high'] + pd_candles['low'] + pd_candles['close']) / 3
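
The RSI block follows the familiar gain/loss construction, RSI = 100 - 100/(1 + RS), as seen in the diff above. A standalone toy illustration, not package code:

    import pandas as pd

    close = pd.Series([100.0, 101.0, 102.0, 101.0, 103.0, 104.0, 103.0, 105.0])
    delta = close.diff()
    avg_gain = delta.clip(lower=0).rolling(window=3).mean()
    avg_loss = (-delta.clip(upper=0)).rolling(window=3).mean()
    rsi = 100 - 100 / (1 + avg_gain / avg_loss)   # evaluates to 100 where the window had no losses
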
@@ -328,59 +581,116 @@ def compute_candles_stats(
         rsi_sliding_window_how_many_candles if rsi_sliding_window_how_many_candles else sliding_window_how_many_candles).sum()
     pd_candles['money_flow_ratio'] = pd_candles['positive_flow_sum'] / pd_candles['negative_flow_sum']
     pd_candles['mfi'] = 100 - (100 / (1 + pd_candles['money_flow_ratio']))
+    pd_candles['mfi_bucket'] = pd_candles['mfi'].apply(lambda x: bucketize_val(x, buckets=BUCKETS_m0_100))
 
 
     # MACD https://www.investopedia.com/terms/m/macd.asp
+    # https://www.youtube.com/watch?v=jmPCL3l08ss
     pd_candles['macd'] = pd_candles['ema_short_periods'] - pd_candles['ema_long_periods']
-    pd_candles['signal'] = pd_candles['macd'].ewm(span=
-    pd_candles['macd_minus_signal'] = pd_candles['macd'] - pd_candles['signal']
+    pd_candles['signal'] = pd_candles['macd'].ewm(span=int(sliding_window_how_many_candles/slow_fast_interval_ratio), adjust=False).mean()
+    pd_candles['macd_minus_signal'] = pd_candles['macd'] - pd_candles['signal'] # MACD histogram
+    macd_cur = pd_candles['macd_minus_signal']
+    macd_prev = pd_candles['macd_minus_signal'].shift(1)
+    bullish_macd_crosses = (macd_prev < 0) & (macd_cur > 0)
+    bearish_macd_crosses = (macd_prev > 0) & (macd_cur < 0)
+    pd_candles.loc[bullish_macd_crosses, 'macd_cross'] = 1
+    pd_candles.loc[bearish_macd_crosses, 'macd_cross'] = -1
+    bullish_indices = pd.Series(pd_candles.index.where(pd_candles['macd_cross'] == 1), index=pd_candles.index).astype('Int64')
+    bearish_indices = pd.Series(pd_candles.index.where(pd_candles['macd_cross'] == -1), index=pd_candles.index).astype('Int64')
+    pd_candles['macd_bullish_cross_last_id'] = bullish_indices.rolling(window=pd_candles.shape[0], min_periods=1).max().astype('Int64')
+    pd_candles['macd_bearish_cross_last_id'] = bearish_indices.rolling(window=pd_candles.shape[0], min_periods=1).max().astype('Int64')
+    conditions = [
+        (pd_candles['macd_bullish_cross_last_id'].notna() &
+         pd_candles['macd_bearish_cross_last_id'].notna() &
+         (pd_candles['macd_bullish_cross_last_id'] > pd_candles['macd_bearish_cross_last_id'])),
+
+        (pd_candles['macd_bullish_cross_last_id'].notna() &
+         pd_candles['macd_bearish_cross_last_id'].notna() &
+         (pd_candles['macd_bearish_cross_last_id'] > pd_candles['macd_bullish_cross_last_id'])),
+
+        (pd_candles['macd_bullish_cross_last_id'].notna() &
+         pd_candles['macd_bearish_cross_last_id'].isna()),
+
+        (pd_candles['macd_bearish_cross_last_id'].notna() &
+         pd_candles['macd_bullish_cross_last_id'].isna())
+    ]
+    choices = ['bullish', 'bearish', 'bullish', 'bearish']
+    pd_candles['macd_cross_last'] = np.select(conditions, choices, default=None) # type: ignore
+    pd_candles.loc[bullish_macd_crosses, 'macd_cross'] = 'bullish'
+    pd_candles.loc[bearish_macd_crosses, 'macd_cross'] = 'bearish'
 
     if not pypy_compat:
-
-
-
-
-
-
+        calculate_slope(
+            pd_data=pd_candles,
+            src_col_name='close',
+            slope_col_name='close_short_slope',
+            sliding_window_how_many_candles=int(sliding_window_how_many_candles/slow_fast_interval_ratio)
+        )
 
-
-
-
+        calculate_slope(
+            pd_data=pd_candles,
+            src_col_name='close',
+            slope_col_name='close_long_slope',
+            sliding_window_how_many_candles=int(sliding_window_how_many_candles)
+        )
 
-
-
-
+        calculate_slope(
+            pd_data=pd_candles,
+            src_col_name='ema_short_periods',
+            slope_col_name='ema_short_slope',
+            sliding_window_how_many_candles=int(sliding_window_how_many_candles/slow_fast_interval_ratio)
+        )
 
-
-
-
+        calculate_slope(
+            pd_data=pd_candles,
+            src_col_name='ema_long_periods',
+            slope_col_name='ema_long_slope',
+            sliding_window_how_many_candles=int(sliding_window_how_many_candles)
+        )
 
-
-
-
+        calculate_slope(
+            pd_data=pd_candles,
+            src_col_name='boillenger_upper',
+            slope_col_name='boillenger_upper_slope',
+            sliding_window_how_many_candles=int(sliding_window_how_many_candles)
+        )
 
-
-
-
+        calculate_slope(
+            pd_data=pd_candles,
+            src_col_name='boillenger_lower',
+            slope_col_name='boillenger_lower_slope',
+            sliding_window_how_many_candles=int(sliding_window_how_many_candles)
+        )
 
-
-
-
+        calculate_slope(
+            pd_data=pd_candles,
+            src_col_name='ema_rsi',
+            slope_col_name='ema_rsi_slope',
+            sliding_window_how_many_candles=int(rsi_trend_sliding_window_how_many_candles)
+        )
 
         pd_candles['regular_divergence'] = (
             (pd_candles['ema_long_slope'] > 0) & (pd_candles['ema_rsi_slope'] < 0) |
             (pd_candles['ema_long_slope'] < 0) & (pd_candles['ema_rsi_slope'] > 0)
         )
+
+        calculate_slope(
+            pd_data=pd_candles,
+            src_col_name='hurst_exp',
+            slope_col_name='hurst_exp_slope',
+            sliding_window_how_many_candles=hurst_exp_window_how_many_candles
+        )
 
 
     # Fibonacci
-
-    pd_candles['
-    pd_candles['fib_618_long_periods'] = pd_candles.apply(lambda rw : estimate_fib_retracement(rw['min_long_periods'], rw['idmin_long_periods'], rw['max_long_periods'], rw['idmax_long_periods'], TARGET_FIB_LEVEL), axis=1)
+    pd_candles[f'fib_{target_fib_level}_short_periods'] = pd_candles.apply(lambda rw : estimate_fib_retracement(rw['min_short_periods'], rw['idmin_short_periods'], rw['max_short_periods'], rw['idmax_short_periods'], target_fib_level), axis=1)
+    pd_candles[f'fib_{target_fib_level}_long_periods'] = pd_candles.apply(lambda rw : estimate_fib_retracement(rw['min_long_periods'], rw['idmin_long_periods'], rw['max_long_periods'], rw['idmax_long_periods'], target_fib_level), axis=1)
 
 
     # Inflection points
     pd_candles['gap_close_vs_ema'] = pd_candles['close'] - pd_candles['ema_long_periods']
+    pd_candles['gap_close_vs_ema_percent'] = pd_candles['gap_close_vs_ema']/pd_candles['close'] *100
+
     pd_candles['close_above_or_below_ema'] = None
     pd_candles.loc[pd_candles['gap_close_vs_ema'] > 0, 'close_above_or_below_ema'] = 'above'
     pd_candles.loc[pd_candles['gap_close_vs_ema'] < 0, 'close_above_or_below_ema'] = 'below'
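
The MACD cross bookkeeping mirrors the EMA cross block earlier: a sign flip of the histogram marks a cross, and the running max of the cross indices tracks the most recent one. A standalone toy illustration of the sign-flip detection, not package code:

    import pandas as pd

    hist = pd.Series([-0.4, -0.1, 0.2, 0.5, -0.3])   # stand-in for macd_minus_signal
    prev = hist.shift(1)
    bullish = (prev < 0) & (hist > 0)                # True only at index 2
    bearish = (prev > 0) & (hist < 0)                # True only at index 4
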
@@ -390,6 +700,68 @@ def compute_candles_stats(
         'close_vs_ema_inflection'
     ] = np.sign(pd_candles['close'] - pd_candles['ema_long_periods'])
 
+def lookup_fib_target(
+    row,
+    pd_candles,
+    target_fib_level : float = 0.618
+) -> Union[Dict, None]:
+    if row is None:
+        return None
+
+    fib_target_short_periods = None
+    fib_target_long_periods = None
+
+    max_short_periods = row['max_short_periods']
+    idmax_short_periods = int(row['idmax_short_periods']) if not math.isnan(row['idmax_short_periods']) else None
+    max_long_periods = row['max_long_periods']
+    idmax_long_periods = int(row['idmax_long_periods']) if not math.isnan(row['idmax_long_periods']) else None
+
+    min_short_periods = row['min_short_periods']
+    idmin_short_periods = int(row['idmin_short_periods']) if not math.isnan(row['idmin_short_periods']) else None
+    min_long_periods = row['min_long_periods']
+    idmin_long_periods = int(row['idmin_long_periods']) if not math.isnan(row['idmin_long_periods']) else None
+
+    if idmax_short_periods and idmin_short_periods and idmax_short_periods>0 and idmin_short_periods>0:
+        if idmax_short_periods>idmin_short_periods and idmax_short_periods < len(pd_candles):
+            # Falling from prev peak
+            last_peak = pd_candles.iloc[idmax_short_periods]
+            fib_target_short_periods = last_peak[f'fib_{target_fib_level}_short_periods'] if not math.isnan(last_peak[f'fib_{target_fib_level}_short_periods']) else None
+
+        else:
+            # Bouncing from prev bottom
+            if idmin_short_periods < len(pd_candles):
+                last_bottom = pd_candles.iloc[idmin_short_periods]
+                fib_target_short_periods = last_bottom[f'fib_{target_fib_level}_short_periods'] if not math.isnan(last_bottom[f'fib_{target_fib_level}_short_periods']) else None
+
+    if idmax_long_periods and idmin_long_periods and idmax_long_periods>0 and idmin_long_periods>0:
+        if idmax_long_periods>idmin_long_periods and idmax_long_periods < len(pd_candles):
+            # Falling from prev peak
+            last_peak = pd_candles.iloc[idmax_long_periods]
+            fib_target_long_periods = last_peak[f'fib_{target_fib_level}_long_periods'] if not math.isnan(last_peak[f'fib_{target_fib_level}_long_periods']) else None
+
+        else:
+            # Bouncing from prev bottom
+            if idmin_long_periods < len(pd_candles):
+                last_bottom = pd_candles.iloc[idmin_long_periods]
+                fib_target_long_periods = last_bottom[f'fib_{target_fib_level}_long_periods'] if not math.isnan(last_bottom[f'fib_{target_fib_level}_long_periods']) else None
+
+    return {
+        'short_periods' : {
+            'idmin' : idmin_short_periods,
+            'idmax' : idmax_short_periods,
+            'min' : min_short_periods,
+            'max' : max_short_periods,
+            'fib_target' : fib_target_short_periods,
+        },
+        'long_periods' : {
+            'idmin' : idmin_long_periods,
+            'idmax' : idmax_long_periods,
+            'min' : min_long_periods,
+            'max' : max_long_periods,
+            'fib_target' : fib_target_long_periods
+        }
+    }
+
 '''
 The implementation from Geeksforgeeks https://www.geeksforgeeks.org/find-indices-of-all-local-maxima-and-local-minima-in-an-array/ is wrong.
 If you have consecutive-duplicates, things will gall apart!
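
lookup_fib_target reads the fib_{level} columns off the last completed swing; the underlying retracement target is a linear interpolation between the swing extremes. A worked toy number illustrating the general idea, not the package's exact formula:

    swing_low, swing_high, fib_level = 90.0, 110.0, 0.618
    # falling from the peak: a 0.618 retracement target sits at
    retracement_price = swing_high - (swing_high - swing_low) * fib_level   # 97.64
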
@@ -799,3 +1171,40 @@ def partition_sliding_window(
         'maxima' : maxima,
         'segments' : consolidated_segements
     }
+
+# This relies on statsmodels.api, which is not pypy compatible
+def compute_pair_stats(
+    pd_candles : pd.DataFrame,
+    how_many_candles : int = 24*7
+) -> None:
+    import statsmodels.api as sm
+
+    def _compute_hedge_ratio(
+        prices0 : List[float],
+        prices1 : List[float]
+    ):
+        model = sm.OLS(prices0, prices1).fit()
+        hedge_ratio = model.params[0]
+        return hedge_ratio
+
+    pd_candles['hedge_ratio'] = np.nan
+    for j in range(how_many_candles, pd_candles.shape[0]):
+        window = pd_candles.iloc[j-how_many_candles:j]
+        hedge_ratio = _compute_hedge_ratio(window['close_1'].values, window['close_2'].values) # type: ignore
+        pd_candles.loc[j, 'hedge_ratio'] = hedge_ratio
+
+    pd_candles['close_spread'] = pd_candles['close_1'] - (pd_candles['close_2'] * pd_candles['hedge_ratio']) # You're fitting one hedge_ratio over a windows
+    mean = pd_candles['close_spread'].rolling(how_many_candles).mean()
+    std = pd_candles['close_spread'].rolling(how_many_candles).std()
+    pd_candles['close_spread_mean'] = mean
+    pd_candles['close_spread_std'] = std
+    pd_candles['zscore_close_spread'] = (pd_candles['close_spread'] - mean)/std
+    pd_candles['zscore_close_spread_min'] = pd_candles['zscore_close_spread'].rolling(how_many_candles).min()
+    pd_candles['zscore_close_spread_max'] = pd_candles['zscore_close_spread'].rolling(how_many_candles).max()
+
+    calculate_slope(
+        pd_data=pd_candles,
+        src_col_name='zscore_close_spread',
+        slope_col_name='zscore_slope',
+        sliding_window_how_many_candles=how_many_candles
+    )
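
A hedged usage sketch for the new compute_pair_stats: the two legs' closes are expected in close_1 and close_2 columns (column names taken from the body above; the synthetic data here is purely illustrative):

    import numpy as np
    import pandas as pd
    from siglab_py.util.analytic_util import compute_pair_stats

    n = 24 * 7 * 4                                           # four weeks of hourly candles, synthetic
    close_2 = pd.Series(100 + np.cumsum(np.random.normal(0, 1, n)))
    close_1 = 1.5 * close_2 + np.random.normal(0, 2, n)      # roughly cointegrated toy pair
    pd_pair = pd.DataFrame({'close_1': close_1, 'close_2': close_2})

    compute_pair_stats(pd_candles=pd_pair, how_many_candles=24*7)
    # per the diff above, this adds hedge_ratio, close_spread, zscore_close_spread
    # and their rolling stats to the DataFrame in place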