siglab-py 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of siglab-py might be problematic. Click here for more details.
- siglab_py/__init__.py +0 -0
- siglab_py/constants.py +3 -0
- siglab_py/exchanges/__init__.py +0 -0
- siglab_py/exchanges/any_exchange.py +20 -0
- siglab_py/market_data_providers/__init__.py +0 -0
- siglab_py/market_data_providers/aggregated_orderbook_provider.py +451 -0
- siglab_py/market_data_providers/candles_provider.py +342 -0
- siglab_py/market_data_providers/candles_ta_provider.py +263 -0
- siglab_py/market_data_providers/deribit_options_expiry_provider.py +197 -0
- siglab_py/market_data_providers/orderbooks_provider.py +359 -0
- siglab_py/market_data_providers/test_provider.py +70 -0
- siglab_py/ordergateway/__init__.py +0 -0
- siglab_py/ordergateway/client.py +137 -0
- siglab_py/ordergateway/encrypt_keys_util.py +43 -0
- siglab_py/ordergateway/gateway.py +658 -0
- siglab_py/ordergateway/test_ordergateway.py +140 -0
- siglab_py/tests/__init__.py +0 -0
- siglab_py/tests/integration/__init__.py +0 -0
- siglab_py/tests/integration/market_data_util_tests.py +123 -0
- siglab_py/tests/unit/__init__.py +0 -0
- siglab_py/util/__init__.py +0 -0
- siglab_py/util/analytic_util.py +792 -0
- siglab_py/util/aws_util.py +47 -0
- siglab_py/util/market_data_util.py +385 -0
- siglab_py/util/retry_util.py +15 -0
- siglab_py-0.1.0.dist-info/METADATA +36 -0
- siglab_py-0.1.0.dist-info/RECORD +29 -0
- siglab_py-0.1.0.dist-info/WHEEL +5 -0
- siglab_py-0.1.0.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,792 @@
|
|
|
1
|
+
import tzlocal
|
|
2
|
+
from datetime import datetime, timezone
|
|
3
|
+
from typing import List, Dict, Union, NoReturn, Any, Tuple
|
|
4
|
+
from pathlib import Path
|
|
5
|
+
import math
|
|
6
|
+
import pandas as pd
|
|
7
|
+
import numpy as np
|
|
8
|
+
from hurst import compute_Hc # compatible with pypy
|
|
9
|
+
|
|
10
|
+
from ccxt.base.exchange import Exchange as CcxtExchange
|
|
11
|
+
from ccxt import deribit
|
|
12
|
+
|
|
13
|
+
from util.market_data_util import fix_column_types
|
|
14
|
+
|
|
15
|
+
# Fibonacci
|
|
16
|
+
MAGIC_FIB_LEVELS = [0, 0.236, 0.382, 0.5, 0.618, 0.786, 1.00, 1.618, 2.618, 3.618, 4.236]
|
|
17
|
+
|
|
18
|
+
def estimate_fib_retracement(
    swing_low: float,
    swing_low_idx: int,
    swing_high: float,
    swing_high_idx: int,
    target_fib_level: float = 0.618
) -> float:
    """
    Estimate the price at a Fibonacci retracement level between a swing low
    and a swing high.

    The direction of the move is inferred from the index order: if the low
    came before the high the move is upward and we retrace down from the
    high; otherwise (including equal indices) we retrace up from the low.
    Reference: https://blog.quantinsti.com/fibonacci-retracement-trading-strategy-python/

    Parameters:
        swing_low / swing_low_idx: price and bar index of the swing low.
        swing_high / swing_high_idx: price and bar index of the swing high.
        target_fib_level: retracement ratio (default 0.618).

    Returns:
        The retracement price for the requested level.
    """
    span = swing_high - swing_low
    low_came_first = swing_low_idx < swing_high_idx
    return (
        swing_high - span * target_fib_level
        if low_came_first
        else swing_low + span * target_fib_level
    )
|
|
34
|
+
|
|
35
|
+
'''
|
|
36
|
+
compute_candles_stats will calculate typical/basic technical indicators using in many trading strategies:
|
|
37
|
+
a. Basic SMA/EMAs (And slopes)
|
|
38
|
+
b. ATR
|
|
39
|
+
c. Boillenger bands (Yes incorrect spelling sorry)
|
|
40
|
+
d. FVG
|
|
41
|
+
e. Hurst Exponent
|
|
42
|
+
f. RSI, MFI
|
|
43
|
+
g. MACD
|
|
44
|
+
h. Fibonacci
|
|
45
|
+
i. Inflections points: where 'close' crosses EMA from above or below.
|
|
46
|
+
|
|
47
|
+
Parameters:
|
|
48
|
+
a. boillenger_std_multiples: For boillenger upper and lower calc
|
|
49
|
+
b. sliding_window_how_many_candles: Moving averages calculation
|
|
50
|
+
c. rsi_ema: RSI calculated using EMA or SMA?
|
|
51
|
+
d. boillenger_ema: Boillenger calculated using SMA or EMA?
|
|
52
|
+
e. slow_fast_interval_ratios
|
|
53
|
+
MACD calculated using two moving averages.
|
|
54
|
+
Slow line using 'sliding_window_how_many_candles' intervals.
|
|
55
|
+
Fast line using 'sliding_window_how_many_candles/slow_fast_interval_ratios' intervals.
|
|
56
|
+
Example,
|
|
57
|
+
if Slow line is calculated using 24 candles and short_long_interval_ratios = 3,
|
|
58
|
+
then Fast line is calculated using 24/3 = 8 candles.
|
|
59
|
+
'''
|
|
60
|
+
def compute_candles_stats(
    pd_candles : pd.DataFrame,
    boillenger_std_multiples : float,
    sliding_window_how_many_candles : int,
    rsi_ema : bool = True,
    boillenger_ema : bool = False,
    slow_fast_interval_ratio : float = 3,
    rsi_sliding_window_how_many_candles : Union[int, None] = None, # RSI standard 14
    hurst_exp_window_how_many_candles : Union[int, None] = None, # Hurst exp standard 100-200
    boillenger_std_multiples_for_aggressive_moves_detect : int = 3, # Aggressive moves if candle low/high breaches boillenger bands from 3 standard deviations.
    pypy_compat : bool = True
):
    """
    Enrich `pd_candles` IN-PLACE with technical-analysis columns.

    Columns added include: candle geometry (candle_height, is_green), SMA/EMA
    fast+slow lines and rolling std, volume EMAs, rolling max/min and their
    label positions (idmax/idmin), ATR, Hurst exponent, Bollinger bands
    (plus an "aggressive" band at a wider multiple), aggressive-move flags,
    FVG (fair value gap) columns, RSI/ema_rsi, MFI, MACD, Fibonacci 0.618
    retracements, close-vs-EMA inflection markers, and — when
    `pypy_compat` is False — OLS slope columns and a regular_divergence flag
    (statsmodels is imported lazily because it is incompatible with pypy).

    Parameters:
        pd_candles: candles with at least open/high/low/close/volume columns.
            NOTE(review): `fix_column_types` is called on it — presumably it
            also normalises datetime/timestamp columns; confirm against
            util.market_data_util.
        boillenger_std_multiples: band width in rolling std deviations.
        sliding_window_how_many_candles: slow/long window length; the fast
            window is this divided by `slow_fast_interval_ratio`.
        rsi_ema: RSI smoothing via EMA (True) or SMA (False).
        boillenger_ema: centre Bollinger bands on EMA (True) or SMA (False).
        slow_fast_interval_ratio: slow:fast window ratio (e.g. 24 and 24/3=8).
        rsi_sliding_window_how_many_candles: RSI/MFI window override
            (falls back to `sliding_window_how_many_candles` when None).
        hurst_exp_window_how_many_candles: Hurst window override; otherwise
            max(sliding window, 125) is used — see comment below.
        boillenger_std_multiples_for_aggressive_moves_detect: std multiple for
            the "aggressive move" breach bands.
        pypy_compat: skip the statsmodels-based slope section when True.

    Returns:
        None. All columns are written into `pd_candles` in place.
    """
    pd_candles['candle_height'] = pd_candles['high'] - pd_candles['low']

    '''
    market_data_gizmo inserted dummy lines --> Need exclude those or "TypeError: unorderable types for comparison": pd_btc_candles = pd_btc_candles[pd_btc_candles.close.notnull()]

    pd_btc_candles.loc[
        (pd_btc_candles['close_above_or_below_ema'] != pd_btc_candles['close_above_or_below_ema'].shift(1)) &
        (abs(pd_btc_candles['gap_close_vs_ema']) > avg_gap_close_vs_ema),
        'close_vs_ema_inflection'
    ] = np.sign(pd_btc_candles['close'] - pd_btc_candles['ema_long_periods']) <-- TypeError: unorderable types for comparison
    '''
    # pd_candles = pd_candles[pd_candles.close.notnull()] # Don't make a copy. Drop in-place

    fix_column_types(pd_candles) # Do this AFTER filtering. Or you'd mess up index, introduce error around idmax, idmin. fix_column_types will drop all 'unnamed' columns and reset_index.

    pd_candles['is_green'] = pd_candles['close'] >= pd_candles['open']

    # Fast ("short") windows are the slow window divided by slow_fast_interval_ratio.
    pd_candles['pct_change_close'] = pd_candles['close'].pct_change() * 100
    pd_candles['sma_short_periods'] = pd_candles['close'].rolling(window=int(sliding_window_how_many_candles/slow_fast_interval_ratio)).mean()
    pd_candles['sma_long_periods'] = pd_candles['close'].rolling(window=sliding_window_how_many_candles).mean()
    pd_candles['ema_short_periods'] = pd_candles['close'].ewm(span=int(sliding_window_how_many_candles/slow_fast_interval_ratio), adjust=False).mean()
    pd_candles['ema_long_periods'] = pd_candles['close'].ewm(span=sliding_window_how_many_candles, adjust=False).mean()
    pd_candles['ema_close'] = pd_candles['ema_long_periods'] # Alias, shorter name
    pd_candles['std'] = pd_candles['close'].rolling(window=sliding_window_how_many_candles).std()

    pd_candles['ema_volume_short_periods'] = pd_candles['volume'].ewm(span=sliding_window_how_many_candles/slow_fast_interval_ratio, adjust=False).mean()
    pd_candles['ema_volume_long_periods'] = pd_candles['volume'].ewm(span=sliding_window_how_many_candles, adjust=False).mean()

    # Rolling extrema plus the LABEL index where each extreme occurred
    # (rolling.apply with raw=False passes a Series, so idxmax/idxmin return
    # the row label — valid because fix_column_types reset the index above).
    pd_candles['max_short_periods'] = pd_candles['close'].rolling(window=int(sliding_window_how_many_candles/slow_fast_interval_ratio)).max()
    pd_candles['max_long_periods'] = pd_candles['close'].rolling(window=sliding_window_how_many_candles).max()
    pd_candles['idmax_short_periods'] = pd_candles['close'].rolling(window=int(sliding_window_how_many_candles/slow_fast_interval_ratio)).apply(lambda x : x.idxmax())
    pd_candles['idmax_long_periods'] = pd_candles['close'].rolling(window=sliding_window_how_many_candles).apply(lambda x : x.idxmax())

    pd_candles['min_short_periods'] = pd_candles['close'].rolling(window=int(sliding_window_how_many_candles/slow_fast_interval_ratio)).min()
    pd_candles['min_long_periods'] = pd_candles['close'].rolling(window=sliding_window_how_many_candles).min()
    pd_candles['idmin_short_periods'] = pd_candles['close'].rolling(window=int(sliding_window_how_many_candles/slow_fast_interval_ratio)).apply(lambda x : x.idxmin())
    pd_candles['idmin_long_periods'] = pd_candles['close'].rolling(window=sliding_window_how_many_candles).apply(lambda x : x.idxmin())

    # ATR https://medium.com/codex/detecting-ranging-and-trending-markets-with-choppiness-index-in-python-1942e6450b58
    # True range = max(high-low, |high-prev_close|, |low-prev_close|); ATR is its rolling mean.
    pd_candles.loc[:,'h_l'] = pd_candles['high'] - pd_candles['low']
    pd_candles.loc[:,'h_pc'] = abs(pd_candles['high'] - pd_candles['close'].shift(1))
    pd_candles.loc[:,'l_pc'] = abs(pd_candles['low'] - pd_candles['close'].shift(1))
    pd_candles.loc[:,'tr'] = pd_candles[['h_l', 'h_pc', 'l_pc']].max(axis=1)
    pd_candles.loc[:,'atr'] = pd_candles['tr'].rolling(window=sliding_window_how_many_candles).mean()

    '''
    @hardcode @todo
    Hurst https://towardsdatascience.com/introduction-to-the-hurst-exponent-with-code-in-python-4da0414ca52e
    Smaller Windows (e.g., 50-100)
    Larger Windows (e.g., 200+)

    Sometimes you may encounter "Exception has occurred: FloatingPointError invalid value encountered in scalar divide"
    And for example adjusting window size from 120 to 125 will resolve the issue.
    '''
    # Window floor of 125 guards against compute_Hc failing on short windows (see note above).
    pd_candles['hurst_exp'] = pd_candles['close'].rolling(
        window=(
            hurst_exp_window_how_many_candles if hurst_exp_window_how_many_candles else (sliding_window_how_many_candles if sliding_window_how_many_candles>=125 else 125)
        )
    ).apply(lambda x: compute_Hc(x, kind='price', simplified=True)[0])

    # Boillenger https://www.quantifiedstrategies.com/python-bollinger-band-trading-strategy/
    # Centre line is SMA by default, EMA when boillenger_ema=True; width in rolling-std multiples.
    pd_candles.loc[:,'boillenger_upper'] = (pd_candles['sma_long_periods'] if not boillenger_ema else pd_candles['ema_long_periods']) + pd_candles['std'] * boillenger_std_multiples
    pd_candles.loc[:,'boillenger_lower'] = (pd_candles['sma_long_periods'] if not boillenger_ema else pd_candles['ema_long_periods']) - pd_candles['std'] * boillenger_std_multiples
    pd_candles.loc[:,'boillenger_channel_height'] = pd_candles['boillenger_upper'] - pd_candles['boillenger_lower']

    # Wider "aggressive" bands used only for breach detection below.
    pd_candles.loc[:,'boillenger_upper_agg'] = (pd_candles['sma_long_periods'] if not boillenger_ema else pd_candles['ema_long_periods']) + pd_candles['std'] * boillenger_std_multiples_for_aggressive_moves_detect
    pd_candles.loc[:,'boillenger_lower_agg'] = (pd_candles['sma_long_periods'] if not boillenger_ema else pd_candles['ema_long_periods']) - pd_candles['std'] * boillenger_std_multiples_for_aggressive_moves_detect
    pd_candles.loc[:,'boillenger_channel_height_agg'] = pd_candles['boillenger_upper_agg'] - pd_candles['boillenger_lower_agg']

    def detect_aggressive_movement(
        index: int,
        pd_candles: pd.DataFrame,
        sliding_window_how_many_candles: int,
        up_or_down: bool = True
    ):
        # Look back over the trailing window ending at `index`; an "aggressive"
        # move is a close at/beyond the wider (agg) Bollinger band. Returns the
        # first breaching row's label and that candle's high/low/height.
        window_start = max(0, index - sliding_window_how_many_candles + 1)
        window = pd_candles.iloc[window_start:index + 1]
        first_breach_index = None
        candle_high, candle_low, candle_height = None, None, None

        if up_or_down:
            aggressive_mask = window['close'] >= window['boillenger_upper_agg']
            if aggressive_mask.any():
                first_breach_index = aggressive_mask.idxmax()  # idxmax of a bool mask = first True label
                candle_high = pd_candles.at[first_breach_index, 'high']
                candle_low = pd_candles.at[first_breach_index, 'low']
                candle_height = candle_high - candle_low
        else:
            aggressive_mask = window['close'] <= window['boillenger_lower_agg']
            if aggressive_mask.any():
                first_breach_index = aggressive_mask.idxmax()
                candle_high = pd_candles.at[first_breach_index, 'high']
                candle_low = pd_candles.at[first_breach_index, 'low']
                candle_height = candle_high - candle_low

        return {
            'aggressive_move': aggressive_mask.any(),
            'first_breach_index': first_breach_index,
            'candle_high' : candle_high,
            'candle_low' : candle_low,
            'candle_height' : candle_height
        }

    # NOTE(review): each of the ten columns below re-runs detect_aggressive_movement
    # per row, so the same window scan is repeated 5x per direction — could be
    # computed once per row and unpacked; left as-is to preserve behavior.
    pd_candles['aggressive_up'] = pd_candles.index.to_series().apply(
        lambda idx: detect_aggressive_movement(
            idx, pd_candles, sliding_window_how_many_candles, up_or_down=True
        )['aggressive_move']
    )
    pd_candles['aggressive_up_index'] = pd_candles.index.to_series().apply(
        lambda idx: detect_aggressive_movement(
            idx, pd_candles, sliding_window_how_many_candles, up_or_down=True
        )['first_breach_index']
    )
    pd_candles['aggressive_up_candle_height'] = pd_candles.index.to_series().apply(
        lambda idx: detect_aggressive_movement(
            idx, pd_candles, sliding_window_how_many_candles, up_or_down=True
        )['candle_height']
    )
    pd_candles['aggressive_up_candle_high'] = pd_candles.index.to_series().apply(
        lambda idx: detect_aggressive_movement(
            idx, pd_candles, sliding_window_how_many_candles, up_or_down=True
        )['candle_high']
    )
    pd_candles['aggressive_up_candle_low'] = pd_candles.index.to_series().apply(
        lambda idx: detect_aggressive_movement(
            idx, pd_candles, sliding_window_how_many_candles, up_or_down=True
        )['candle_low']
    )
    pd_candles['aggressive_down'] = pd_candles.index.to_series().apply(
        lambda idx: detect_aggressive_movement(
            idx, pd_candles, sliding_window_how_many_candles, up_or_down=False
        )['aggressive_move']
    )
    pd_candles['aggressive_down_index'] = pd_candles.index.to_series().apply(
        lambda idx: detect_aggressive_movement(
            idx, pd_candles, sliding_window_how_many_candles, up_or_down=False
        )['first_breach_index']
    )
    pd_candles['aggressive_down_candle_height'] = pd_candles.index.to_series().apply(
        lambda idx: detect_aggressive_movement(
            idx, pd_candles, sliding_window_how_many_candles, up_or_down=False
        )['candle_height']
    )
    pd_candles['aggressive_down_candle_high'] = pd_candles.index.to_series().apply(
        lambda idx: detect_aggressive_movement(
            idx, pd_candles, sliding_window_how_many_candles, up_or_down=False
        )['candle_high']
    )
    pd_candles['aggressive_down_candle_low'] = pd_candles.index.to_series().apply(
        lambda idx: detect_aggressive_movement(
            idx, pd_candles, sliding_window_how_many_candles, up_or_down=False
        )['candle_low']
    )

    # FVG - Fair Value Gap https://atas.net/technical-analysis/fvg-trading-what-is-fair-value-gap-meaning-strategy/
    def compute_fvg(row, pd_candles):
        # Gap between the candle BEFORE and the candle AFTER the aggressive
        # breach candle: up-moves gap from prior high to next low, down-moves
        # from prior low to next high. None/None when no aggressive move.
        fvg_low = None
        fvg_high = None

        if row['aggressive_up_index'] is not None and not math.isnan(row['aggressive_up_index']):
            idx = row['aggressive_up_index']
            last_high = pd_candles.at[idx - 1, 'high']  # NOTE(review): assumes idx >= 1 — a breach on row 0 would raise; confirm upstream guarantees
            if idx + 1 < len(pd_candles):
                next_low = pd_candles.at[idx + 1, 'low']
            else:
                next_low = None

            fvg_low = next_low
            fvg_high = last_high

        elif row['aggressive_down_index'] is not None and not math.isnan(row['aggressive_down_index']):
            idx = row['aggressive_down_index']
            last_low = pd_candles.at[idx - 1, 'low']
            if idx + 1 < len(pd_candles):
                next_high = pd_candles.at[idx + 1, 'high']
            else:
                next_high = None

            fvg_low = last_low
            fvg_high = next_high

        return pd.Series({'fvg_low': fvg_low, 'fvg_high': fvg_high})

    fvg_result = pd_candles.apply(lambda row: compute_fvg(row, pd_candles), axis=1)
    pd_candles[['fvg_low', 'fvg_high']] = fvg_result
    pd_candles['fvg_gap'] = pd_candles['fvg_high'] - pd_candles['fvg_low']

    def compute_fvg_mitigated(row, pd_candles):
        # A gap is "mitigated" when a later close (between the breach candle
        # and this row) has traded back into the gap zone.
        mitigated = False
        if row['aggressive_down_index'] is not None and not math.isnan(row['aggressive_down_index']):
            idx = int(row['aggressive_down_index'])
            mitigated = pd_candles.iloc[idx + 1:row.name]['close'].gt(row['fvg_low']).any()
        elif row['aggressive_up_index'] is not None and not math.isnan(row['aggressive_up_index']):
            idx = int(row['aggressive_up_index'])
            mitigated = pd_candles.iloc[idx + 1:row.name]['close'].lt(row['fvg_high']).any()
        return mitigated

    pd_candles['fvg_mitigated'] = pd_candles.apply(lambda row: compute_fvg_mitigated(row, pd_candles), axis=1)

    # RSI - https://www.youtube.com/watch?v=G9oUTi-PI18&t=809s
    pd_candles.loc[:,'close_delta'] = pd_candles['close'].diff()
    pd_candles.loc[:,'close_delta_percent'] = pd_candles['close'].pct_change()
    lo_up = pd_candles['close_delta'].clip(lower=0)      # gains only
    lo_down = -1 * pd_candles['close_delta'].clip(upper=0)  # losses as positive values
    pd_candles.loc[:,'up'] = lo_up
    pd_candles.loc[:,'down'] = lo_down

    if rsi_ema == True:
        # Use exponential moving average (com = N-1 gives smoothing alpha 1/N)
        lo_ma_up = lo_up.ewm(
            com = (rsi_sliding_window_how_many_candles if rsi_sliding_window_how_many_candles else sliding_window_how_many_candles) - 1,
            adjust=True,
            min_periods = rsi_sliding_window_how_many_candles if rsi_sliding_window_how_many_candles else sliding_window_how_many_candles).mean()
        lo_ma_down = lo_down.ewm(
            com = (rsi_sliding_window_how_many_candles if rsi_sliding_window_how_many_candles else sliding_window_how_many_candles) - 1,
            adjust=True,
            min_periods = rsi_sliding_window_how_many_candles if rsi_sliding_window_how_many_candles else sliding_window_how_many_candles).mean()

    else:
        # Use simple moving average
        lo_ma_up = lo_up.rolling(window = rsi_sliding_window_how_many_candles if rsi_sliding_window_how_many_candles else sliding_window_how_many_candles).mean()
        lo_ma_down = lo_down.rolling(window = rsi_sliding_window_how_many_candles if rsi_sliding_window_how_many_candles else sliding_window_how_many_candles).mean()

    lo_rs = lo_ma_up / lo_ma_down  # relative strength; RSI = 100 - 100/(1+RS)
    pd_candles.loc[:,'rsi'] = 100 - (100/(1 + lo_rs))
    pd_candles['ema_rsi'] = pd_candles['rsi'].ewm(
        span=rsi_sliding_window_how_many_candles if rsi_sliding_window_how_many_candles else sliding_window_how_many_candles,
        adjust=False).mean()

    # MFI (Money Flow Index) https://randerson112358.medium.com/algorithmic-trading-strategy-using-money-flow-index-mfi-python-aa46461a5ea5
    pd_candles['typical_price'] = (pd_candles['high'] + pd_candles['low'] + pd_candles['close']) / 3
    pd_candles['money_flow'] = pd_candles['typical_price'] * pd_candles['volume']
    pd_candles['money_flow_positive'] = pd_candles['money_flow'].where(
        pd_candles['typical_price'] > pd_candles['typical_price'].shift(1), 0
    )
    pd_candles['money_flow_negative'] = pd_candles['money_flow'].where(
        pd_candles['typical_price'] < pd_candles['typical_price'].shift(1), 0
    )
    pd_candles['positive_flow_sum'] = pd_candles['money_flow_positive'].rolling(
        rsi_sliding_window_how_many_candles if rsi_sliding_window_how_many_candles else sliding_window_how_many_candles).sum()
    pd_candles['negative_flow_sum'] = pd_candles['money_flow_negative'].rolling(
        rsi_sliding_window_how_many_candles if rsi_sliding_window_how_many_candles else sliding_window_how_many_candles).sum()
    pd_candles['money_flow_ratio'] = pd_candles['positive_flow_sum'] / pd_candles['negative_flow_sum']
    pd_candles['mfi'] = 100 - (100 / (1 + pd_candles['money_flow_ratio']))

    # MACD https://www.investopedia.com/terms/m/macd.asp
    # Here MACD = fast EMA - slow EMA of the configured windows; signal is the standard 9-period EMA.
    pd_candles['macd'] = pd_candles['ema_short_periods'] - pd_candles['ema_long_periods']
    pd_candles['signal'] = pd_candles['macd'].ewm(span=9, adjust=False).mean()
    pd_candles['macd_minus_signal'] = pd_candles['macd'] - pd_candles['signal']

    if not pypy_compat:
        import statsmodels.api as sm # in-compatible with pypy

        # Slopes: OLS fit of each rolling window against positions 0..w-1.
        # NOTE(review): X[:len(x)] always uses the FIRST len(x) rows of X, i.e.
        # each window is regressed against 0..w-1 rather than its own positions
        # — the slope is unaffected but the intercept would be; confirm intended.
        X = sm.add_constant(range(len(pd_candles['close'])))
        rolling_slope = pd_candles['close'].rolling(window=int(sliding_window_how_many_candles/slow_fast_interval_ratio)).apply(lambda x: sm.OLS(x, X[:len(x)]).fit().params[1], raw=False)
        pd_candles['close_short_slope'] = rolling_slope

        X = sm.add_constant(range(len(pd_candles['close'])))
        rolling_slope = pd_candles['close'].rolling(window=sliding_window_how_many_candles).apply(lambda x: sm.OLS(x, X[:len(x)]).fit().params[1], raw=False)
        pd_candles['close_long_slope'] = rolling_slope

        X = sm.add_constant(range(len(pd_candles['ema_short_periods'])))
        rolling_slope = pd_candles['ema_short_periods'].rolling(window=int(sliding_window_how_many_candles/slow_fast_interval_ratio)).apply(lambda x: sm.OLS(x, X[:len(x)]).fit().params[1], raw=False)
        pd_candles['ema_short_slope'] = rolling_slope

        X = sm.add_constant(range(len(pd_candles['ema_long_periods'])))
        rolling_slope = pd_candles['ema_long_periods'].rolling(window=sliding_window_how_many_candles).apply(lambda x: sm.OLS(x, X[:len(x)]).fit().params[1], raw=False)
        pd_candles['ema_long_slope'] = rolling_slope

        X = sm.add_constant(range(len(pd_candles['boillenger_upper'])))
        rolling_slope = pd_candles['boillenger_upper'].rolling(window=int(sliding_window_how_many_candles/slow_fast_interval_ratio)).apply(lambda x: sm.OLS(x, X[:len(x)]).fit().params[1], raw=False)
        pd_candles['boillenger_upper_slope'] = rolling_slope

        X = sm.add_constant(range(len(pd_candles['boillenger_lower'])))
        rolling_slope = pd_candles['boillenger_lower'].rolling(window=sliding_window_how_many_candles).apply(lambda x: sm.OLS(x, X[:len(x)]).fit().params[1], raw=False)
        pd_candles['boillenger_lower_slope'] = rolling_slope

        X = sm.add_constant(range(len(pd_candles['ema_rsi'])))
        rolling_slope = pd_candles['ema_rsi'].rolling(window=sliding_window_how_many_candles).apply(lambda x: sm.OLS(x, X[:len(x)]).fit().params[1], raw=False)
        pd_candles['ema_rsi_slope'] = rolling_slope

        # Regular divergence: price slope and RSI slope point in opposite directions.
        pd_candles['regular_divergence'] = (
            (pd_candles['ema_long_slope'] > 0) & (pd_candles['ema_rsi_slope'] < 0) |
            (pd_candles['ema_long_slope'] < 0) & (pd_candles['ema_rsi_slope'] > 0)
        )

    # Fibonacci
    # Retracement from the rolling min/max; idmin/idmax positions decide trend direction.
    TARGET_FIB_LEVEL = 0.618
    pd_candles['fib_618_short_periods'] = pd_candles.apply(lambda rw : estimate_fib_retracement(rw['min_short_periods'], rw['idmin_short_periods'], rw['max_short_periods'], rw['idmax_short_periods'], TARGET_FIB_LEVEL), axis=1)
    pd_candles['fib_618_long_periods'] = pd_candles.apply(lambda rw : estimate_fib_retracement(rw['min_long_periods'], rw['idmin_long_periods'], rw['max_long_periods'], rw['idmax_long_periods'], TARGET_FIB_LEVEL), axis=1)

    # Inflection points
    pd_candles['gap_close_vs_ema'] = pd_candles['close'] - pd_candles['ema_long_periods']
    pd_candles['close_above_or_below_ema'] = None
    pd_candles.loc[pd_candles['gap_close_vs_ema'] > 0, 'close_above_or_below_ema'] = 'above'
    pd_candles.loc[pd_candles['gap_close_vs_ema'] < 0, 'close_above_or_below_ema'] = 'below'

    # Mark a row when the above/below state differs from the NEXT row (shift(-1)),
    # i.e. the last candle before a close/EMA crossover; value is the sign of
    # close - ema on that row (+1 above, -1 below, 0 equal).
    pd_candles.loc[
        (pd_candles['close_above_or_below_ema'] != pd_candles['close_above_or_below_ema'].shift(-1)),
        'close_vs_ema_inflection'
    ] = np.sign(pd_candles['close'] - pd_candles['ema_long_periods'])
|
|
383
|
+
|
|
384
|
+
'''
|
|
385
|
+
The implementation from Geeksforgeeks https://www.geeksforgeeks.org/find-indices-of-all-local-maxima-and-local-minima-in-an-array/ is wrong.
|
|
386
|
+
If you have consecutive duplicates, things will fall apart!
|
|
387
|
+
Example 1: values = [ 1, 2, 3, 7, 11, 15, 13, 12, 11, 6, 5, 7, 11, 8]
|
|
388
|
+
The default implementation correctly identify "15" as a peak.
|
|
389
|
+
|
|
390
|
+
Example 2: values = [ 1, 2, 3, 7, 11, 15, 15, 13, 12, 11, 6, 5, 7, 11, 8]
|
|
391
|
+
The default implementation will mark "11" as local maxima because there are two consecutive 15's.
|
|
392
|
+
|
|
393
|
+
Fix: https://stackoverflow.com/questions/75013708/python-finding-local-minima-and-maxima?noredirect=1#comment132376733_75013708
|
|
394
|
+
'''
|
|
395
|
+
def find_local_max_min(values: List[float], merge_distance: int = 5, neighborhood: int = 10) -> Union[Dict[str, List], None]:
    """
    Find indices of local maxima and minima in `values`, robust to consecutive
    duplicates (plateaus), then merge extrema that cluster within
    `merge_distance` indices into a single representative point.

    Parameters:
        values: numeric sequence (e.g. closes).
        merge_distance: extrema closer than this many indices are collapsed to
            the middle point of the cluster.
        neighborhood: index i is a local max (min) when values[i] is >= (<=)
            every value within +/- `neighborhood` indices. Default 10 keeps
            the historical hard-coded window.

    Returns:
        {'local_max': [indices], 'local_min': [indices]}, or None when
        len(values) < 2.
    """
    mx: List[int] = []
    mn: List[int] = []

    n = len(values)
    if n < 2:
        return None

    # Endpoints. FIX: a first value strictly ABOVE its neighbour is a local
    # maximum (the original appended it to the minima list and vice versa,
    # inconsistent with the tail handling below).
    if values[0] > values[1]:
        mx.append(0)
    elif values[0] < values[1]:
        mn.append(0)

    # Interior points: plateau-safe >=/<= comparison over a +/- neighborhood
    # window (https://stackoverflow.com/questions/75013708).
    for i in range(1, n-1):
        if all(values[i] >= values[j] for j in range(i-neighborhood, i+neighborhood+1) if 0 <= j < n):
            mx.append(i)
        elif all(values[i] <= values[j] for j in range(i-neighborhood, i+neighborhood+1) if 0 <= j < n):
            mn.append(i)

    if values[-1] > values[-2]:
        mx.append(n-1)
    elif values[-1] < values[-2]:
        mn.append(n-1)

    def merge_nearby_points(points: List[int]) -> List[int]:
        # Collapse each cluster of nearby extrema to its middle point.
        if not points:
            return []  # FIX: original raised IndexError on an empty list (e.g. monotonic or constant input)
        merged = []
        start = points[0]
        for i in range(1, len(points)):
            if points[i] - start > merge_distance:
                merged.append(start + (points[i-1] - start) // 2)  # Take the middle point
                start = points[i]
        merged.append(start + (points[-1] - start) // 2)  # Take the middle point for the last segment
        return merged

    return {
        'local_max': merge_nearby_points(mx),
        'local_min': merge_nearby_points(mn)
    }
|
|
440
|
+
|
|
441
|
+
def partition_sliding_window(
|
|
442
|
+
pd_candles : pd.DataFrame,
|
|
443
|
+
sliding_window_how_many_candles : int,
|
|
444
|
+
smoothing_window_size_ratio : int,
|
|
445
|
+
linregress_stderr_threshold : float,
|
|
446
|
+
max_recur_depth : int,
|
|
447
|
+
min_segment_size_how_many_candles : int,
|
|
448
|
+
segment_consolidate_slope_ratio_threshold : float,
|
|
449
|
+
sideway_price_condition_threshold : float
|
|
450
|
+
) -> Dict[str, Any]:
|
|
451
|
+
|
|
452
|
+
window_size = int(sliding_window_how_many_candles/smoothing_window_size_ratio)
|
|
453
|
+
# window_size = 8 # @hack
|
|
454
|
+
smoothed_colse = pd.Series(pd_candles['close']).rolling(window=window_size, min_periods=window_size).mean()
|
|
455
|
+
pd_candles['smoothed_close'] = smoothed_colse
|
|
456
|
+
|
|
457
|
+
pd_candles['maxima'] = False
|
|
458
|
+
pd_candles['minima'] = False
|
|
459
|
+
maxima = []
|
|
460
|
+
minima = []
|
|
461
|
+
maxima_minima = find_local_max_min(values = pd_candles['close'].to_list(), merge_distance=1) # @CRITICAL close vs smoothed_close and merge_distance
|
|
462
|
+
if maxima_minima:
|
|
463
|
+
maxima = maxima_minima['local_max']
|
|
464
|
+
minima = maxima_minima['local_min']
|
|
465
|
+
maxima = [x for x in maxima if x>=pd_candles.index.min()]
|
|
466
|
+
minima = [x for x in minima if x>=pd_candles.index.min()]
|
|
467
|
+
pd_candles.loc[maxima, 'maxima'] = True
|
|
468
|
+
pd_candles.loc[minima, 'minima'] = True
|
|
469
|
+
|
|
470
|
+
inflection_points = pd_candles[(pd_candles.close_vs_ema_inflection == 1) | (pd_candles.close_vs_ema_inflection == -1)].index.tolist()
|
|
471
|
+
inflection_points = [ index-1 for index in inflection_points ]
|
|
472
|
+
if (pd_candles.shape[0]-1) not in inflection_points:
|
|
473
|
+
inflection_points.append(pd_candles.shape[0]-1)
|
|
474
|
+
|
|
475
|
+
last_point = inflection_points[0]
|
|
476
|
+
sparse_inflection_points = [ last_point ]
|
|
477
|
+
for point in inflection_points:
|
|
478
|
+
if (point not in sparse_inflection_points) and ((point-last_point)>min_segment_size_how_many_candles):
|
|
479
|
+
sparse_inflection_points.append(point)
|
|
480
|
+
last_point = point
|
|
481
|
+
inflection_points = sparse_inflection_points
|
|
482
|
+
|
|
483
|
+
def _compute_new_segment(
    pd_candles : pd.DataFrame,
    start_index : int,
    end_index : int,
    cur_recur_depth : int,
    linregress_stderr_threshold : float = 50,
    max_recur_depth : int = 2,
    min_segment_size_how_many_candles : int = 15
) -> Union[List[Dict], None]:
    """
    Build one or more trend segments over pd_candles rows [start_index, end_index].

    Fits linear regressions through the Bollinger ('boillenger') upper/lower
    band endpoints, and through the local maxima/minima ('maxima'/'minima'
    are free variables from the enclosing function). If the full-fit stderr
    is below linregress_stderr_threshold, or a recursion/size limit is hit,
    a single segment dict is returned; otherwise the window is split at the
    tallest candle and both halves are segmented recursively.

    BUGFIX: the recursive calls previously dropped linregress_stderr_threshold,
    max_recur_depth and min_segment_size_how_many_candles, silently reverting
    to the defaults (50, 2, 15) below the first recursion level even when the
    caller supplied custom values. They are now passed through.

    Returns a list of segment dicts (possibly empty when start==end).
    """
    new_segments : Union[List[Dict], None] = None

    # Clamp to the last available candle.
    if end_index > pd_candles.shape[0]-1:
        end_index = pd_candles.shape[0]-1

    if start_index == end_index:
        return []

    start_upper = pd_candles.iloc[start_index]['boillenger_upper']
    end_upper = pd_candles.iloc[end_index]['boillenger_upper']
    start_lower = pd_candles.iloc[start_index]['boillenger_lower']
    end_lower = pd_candles.iloc[end_index]['boillenger_lower']

    start_datetime = pd_candles.iloc[start_index]['datetime']
    end_datetime = pd_candles.iloc[end_index]['datetime']
    start_timestamp_ms = pd_candles.iloc[start_index]['timestamp_ms']
    end_timestamp_ms = pd_candles.iloc[end_index]['timestamp_ms']
    start_close = pd_candles.iloc[start_index]['close']
    end_close = pd_candles.iloc[end_index]['close']

    # Using Boillenger upper and lower only
    maxima_idx_boillenger = [ start_index, end_index ]
    maxima_close_boillenger = [ start_upper, end_upper ]
    minima_idx_boillenger = [ start_index, end_index ]
    minima_close_boillenger = [ start_lower, end_lower ]

    from scipy.stats import linregress # in-compatible with pypy

    maxima_linregress_boillenger = linregress(maxima_idx_boillenger, maxima_close_boillenger)
    minima_linregress_boillenger = linregress(minima_idx_boillenger, minima_close_boillenger)

    # Using Boillenger upper and lower AND Local maxima/minima.
    # Band values may be NaN near the start of the series; fall back to close.
    maxima_idx_full = [start_index] + [ x for x in maxima if x>=start_index+1 and x<end_index ] + [end_index]
    maxima_close_full = [ start_upper if not math.isnan(start_upper) else start_close ] + [ pd_candles.loc[x]['close'] for x in maxima if x>start_index and x<end_index ] + [ end_upper ]
    minima_idx_full = [start_index] + [ x for x in minima if x>=start_index+1 and x<end_index ] + [end_index]
    minima_close_full = [ start_lower if not math.isnan(start_lower) else start_close ] + [ pd_candles.loc[x]['close'] for x in minima if x>start_index and x<end_index ] + [ end_lower ]

    maxima_linregress_full = linregress(maxima_idx_full, maxima_close_full)
    minima_linregress_full = linregress(minima_idx_full, minima_close_full)

    # Candidate split point: the tallest candle in the window.
    largest_candle_index : int = int(pd_candles.iloc[start_index:end_index,:]['candle_height'].idxmax())
    if (
        (abs(maxima_linregress_full.stderr) < linregress_stderr_threshold and abs(minima_linregress_full.stderr) < linregress_stderr_threshold) # type: ignore Otherwise, Error: Cannot access attribute "stderr" for class "_"
        or cur_recur_depth>=max_recur_depth
        or (start_index==largest_candle_index or end_index==largest_candle_index+1)
        or (
            # Splitting here would leave a sub-segment shorter than allowed.
            (end_index-largest_candle_index < min_segment_size_how_many_candles)
            or (largest_candle_index - start_index < min_segment_size_how_many_candles)
        )
    ):
        new_segment = {
            'start' : start_index,
            'end' : end_index,
            'start_datetime' : start_datetime,
            'end_datetime' : end_datetime,
            'start_timestamp_ms' : start_timestamp_ms,
            'end_timestamp_ms' : end_timestamp_ms,
            'start_close' : start_close,
            'end_close' : end_close,
            'window_size_num_intervals' : end_index - start_index,
            'cur_recur_depth' : cur_recur_depth,
            'up_or_down' : 'up' if end_close>=start_close else 'down',

            'volume' : pd_candles[start_index:end_index]['volume'].sum(), # in base_ccy

            'maxima_idx_boillenger' : maxima_idx_boillenger,
            'maxima_close_boillenger' : maxima_close_boillenger,
            'minima_idx_boillenger' : minima_idx_boillenger,
            'minima_close_boillenger' : minima_close_boillenger,

            'maxima_linregress_boillenger' : maxima_linregress_boillenger,
            'minima_linregress_boillenger' : minima_linregress_boillenger,
            'maxima_linregress_full' : maxima_linregress_full,
            'minima_linregress_full' : minima_linregress_full,
        }
        new_segments = [ new_segment ]
    else:
        # Split at the tallest candle and recurse on both halves,
        # passing the thresholds through (previously dropped).
        new_segments1 = _compute_new_segment(pd_candles, start_index, largest_candle_index, cur_recur_depth+1, linregress_stderr_threshold, max_recur_depth, min_segment_size_how_many_candles)
        new_segments2 = _compute_new_segment(pd_candles, largest_candle_index+1, end_index, cur_recur_depth+1, linregress_stderr_threshold, max_recur_depth, min_segment_size_how_many_candles)
        new_segments = (new_segments1 or []) + (new_segments2 or [])

    return new_segments
575
|
+
|
|
576
|
+
# Walk the inflection points left-to-right, segmenting the candles between
# consecutive boundaries. The very first window starts at 0 (or at the
# sliding window offset, once enough data exists for Bollinger values);
# subsequent windows start where the previous segment ended.
# NOTE(review): nesting reconstructed from mangled source — confirm upstream.
segments = []
for end_index in inflection_points:
    if not segments:
        start_index = 0

        inscope_maxima = [ x for x in maxima if 0 <= x < end_index ]
        inscope_minima = [ x for x in minima if 0 <= x < end_index ]

        if inscope_maxima and inscope_minima:
            if sliding_window_how_many_candles < end_index:
                start_index = sliding_window_how_many_candles
            new_segments = _compute_new_segment(pd_candles, start_index, end_index, 0, linregress_stderr_threshold, max_recur_depth, min_segment_size_how_many_candles)
            segments = (segments or []) + (new_segments or [])

    else:
        start_index = segments[-1]['end']
        if start_index != end_index:
            new_segments = _compute_new_segment(pd_candles, start_index, end_index, 0, linregress_stderr_threshold, max_recur_depth, min_segment_size_how_many_candles)
            if new_segments:
                segments = segments + new_segments
|
|
598
|
+
'''
You have five kinds of wedges:
    a. Rising parallel
    b. Rising converging
    c. Side way
    d. Falling parallel
    e. Falling converging

Here, we're merging 'parallel' segments based on slope of 'maxima_linregress_boillenger' and 'minima_linregress_boillenger' from adjacent segments.
'''
consolidated_segements = [ segments[0] ]
for segment in segments:
    if segment in consolidated_segements:
        continue

    prev_segment = consolidated_segements[-1]
    prev_maxima_slope = prev_segment['maxima_linregress_boillenger'].slope
    prev_minima_slope = prev_segment['minima_linregress_boillenger'].slope
    cur_maxima_slope = segment['maxima_linregress_boillenger'].slope
    cur_minima_slope = segment['minima_linregress_boillenger'].slope

    # NaN slopes (early candles without Bollinger values): keep as-is.
    if math.isnan(prev_maxima_slope) or math.isnan(prev_minima_slope):
        consolidated_segements.append(segment)
        continue

    slopes_parallel = (
        abs(prev_maxima_slope/cur_maxima_slope - 1) < segment_consolidate_slope_ratio_threshold
        and abs(prev_minima_slope/cur_minima_slope - 1) < segment_consolidate_slope_ratio_threshold
    )
    if not slopes_parallel:
        consolidated_segements.append(segment)
        continue

    # Adjacent segments run parallel: replace the previous one with a single
    # merged segment spanning both.
    consolidated_segements.pop()

    start_index = prev_segment['maxima_idx_boillenger'][0]
    end_index = segment['maxima_idx_boillenger'][-1]
    maxima_idx_boillenger = [ start_index, end_index ]
    maxima_close_boillenger = [ prev_segment['maxima_close_boillenger'][0], segment['maxima_close_boillenger'][-1] ]
    minima_idx_boillenger = maxima_idx_boillenger
    minima_close_boillenger = [ prev_segment['minima_close_boillenger'][0], segment['minima_close_boillenger'][-1] ]

    from scipy.stats import linregress # in-compatible with pypy

    maxima_linregress_boillenger = linregress(maxima_idx_boillenger, maxima_close_boillenger)
    minima_linregress_boillenger = linregress(minima_idx_boillenger, minima_close_boillenger)

    # Using Boillenger upper and lower AND Local maxima/minima
    start_upper = pd_candles.iloc[start_index]['boillenger_upper']
    end_upper = pd_candles.iloc[end_index]['boillenger_upper']
    start_lower = pd_candles.iloc[start_index]['boillenger_lower']
    end_lower = pd_candles.iloc[end_index]['boillenger_lower']
    maxima_idx_full = [prev_segment['start']] + [ x for x in maxima if start_index+1 <= x < end_index ] + [segment['end']]
    maxima_close_full = [ start_upper ] + [ pd_candles.loc[x]['close'] for x in maxima if start_index < x < end_index ] + [ end_upper ]
    minima_idx_full = [prev_segment['start']] + [ x for x in minima if start_index+1 <= x < end_index ] + [segment['end']]
    minima_close_full = [ start_lower ] + [ pd_candles.loc[x]['close'] for x in minima if start_index < x < end_index ] + [ end_lower ]

    maxima_linregress_full = linregress(maxima_idx_full, maxima_close_full)
    minima_linregress_full = linregress(minima_idx_full, minima_close_full)

    merged_segment = {
        'start' : prev_segment['start'],
        'end' : segment['end'],
        'start_datetime' : prev_segment['start_datetime'],
        'end_datetime' : segment['end_datetime'],
        'start_timestamp_ms' : prev_segment['start_timestamp_ms'],
        'end_timestamp_ms' : segment['end_timestamp_ms'],
        'start_close' : prev_segment['start_close'],
        'end_close' : segment['end_close'],
        'window_size_num_intervals' : end_index - start_index,
        'cur_recur_depth' : max(prev_segment['cur_recur_depth'], segment['cur_recur_depth']),
        'up_or_down' : 'up' if segment['end_close']>=prev_segment['start_close'] else 'down',

        'volume' : pd_candles[start_index:end_index]['volume'].sum(), # in base_ccy

        'maxima_idx_boillenger' : maxima_idx_boillenger,
        'maxima_close_boillenger' : maxima_close_boillenger,
        'minima_idx_boillenger' : minima_idx_boillenger,
        'minima_close_boillenger' : minima_close_boillenger,

        'maxima_linregress_boillenger' : maxima_linregress_boillenger,
        'minima_linregress_boillenger' : minima_linregress_boillenger,
        'maxima_linregress_full' : maxima_linregress_full,
        'minima_linregress_full' : minima_linregress_full,
    }
    consolidated_segements.append(merged_segment)
|
679
|
+
'''
Depending on 'sliding_window_how_many_candles', pd_candles['boillenger_upper'] and pd_candles['boillenger_lower'] from 'compute_candles_stats' may be nan in first few segments.
So here, we're back filling pd_candles['boillenger_upper'] and pd_candles['boillenger_lower'] from subsequent segments.
'''
# Walk right-to-left so each NaN segment can borrow band values from the
# segment that follows it ('last_segment' trails one step behind the loop).
last_segment = consolidated_segements[-1]
for i in range(len(consolidated_segements)-1, -1, -1):
    segment = consolidated_segements[i]
    if math.isnan(segment['maxima_close_boillenger'][0]) or math.isnan(segment['minima_close_boillenger'][0]):
        start_index = segment['start']
        end_index = segment['end']
        start_close = segment['start_close']

        # Using Boillenger upper and lower only
        maxima_idx_boillenger = segment['maxima_idx_boillenger']
        minima_idx_boillenger = segment['minima_idx_boillenger']
        maxima_close_boillenger = segment['maxima_close_boillenger']
        minima_close_boillenger = segment['minima_close_boillenger']

        # BUGFIX: condition was "... or not math.isnan(minima_close_boillenger[-1])",
        # which triggered the back-fill whenever the minima end value was VALID,
        # clobbering good band data with the neighbour's values. Back-fill only
        # when either end value is actually NaN (mirrors the outer NaN guard).
        if math.isnan(maxima_close_boillenger[-1]) or math.isnan(minima_close_boillenger[-1]):
            maxima_close_boillenger[-1] = last_segment['maxima_close_boillenger'][0]
            minima_close_boillenger[-1] = last_segment['minima_close_boillenger'][0]

        # Synthesize start-of-segment band values around start_close using the
        # (now filled) end-of-segment band height.
        end_boillenger_height = maxima_close_boillenger[-1] - minima_close_boillenger[-1]
        maxima_close_boillenger[0] = segment['start_close'] + end_boillenger_height/2
        minima_close_boillenger[0] = segment['start_close'] - end_boillenger_height/2

        from scipy.stats import linregress # in-compatible with pypy

        maxima_linregress_boillenger = linregress(maxima_idx_boillenger, maxima_close_boillenger)
        minima_linregress_boillenger = linregress(minima_idx_boillenger, minima_close_boillenger)

        # Using Boillenger upper and lower AND Local maxima/minima
        start_upper = maxima_close_boillenger[0]
        end_upper = maxima_close_boillenger[-1]
        start_lower = minima_close_boillenger[0]
        end_lower = minima_close_boillenger[-1]
        maxima_idx_full = [start_index] + [ x for x in maxima if x>=start_index+1 and x<end_index ] + [end_index]
        maxima_close_full = [ start_upper if not math.isnan(start_upper) else start_close ] + [ pd_candles.loc[x]['close'] for x in maxima if x>start_index and x<end_index ] + [ end_upper ]
        minima_idx_full = [start_index] + [ x for x in minima if x>=start_index+1 and x<end_index ] + [end_index]
        minima_close_full = [ start_lower if not math.isnan(start_lower) else start_close ] + [ pd_candles.loc[x]['close'] for x in minima if x>start_index and x<end_index ] + [ end_lower ]

        maxima_linregress_full = linregress(maxima_idx_full, maxima_close_full)
        minima_linregress_full = linregress(minima_idx_full, minima_close_full)

        segment['maxima_linregress_boillenger'] = maxima_linregress_boillenger
        segment['minima_linregress_boillenger'] = minima_linregress_boillenger
        segment['maxima_linregress_full'] = maxima_linregress_full
        segment['minima_linregress_full'] = minima_linregress_full

    last_segment = segment
|
|
728
|
+
'''
You have five kinds of wedges:
    a. Rising parallel
    b. Rising converging/diverging
    c. Side way
    d. Falling parallel
    e. Falling converging/diverging
'''
def classify_segment(
    segment : Dict,
    segment_consolidate_slope_ratio_threshold : float,
    sideway_price_condition_threshold : float
):
    """
    Tag 'segment' in-place with a 'class' label.

    Labels: 'sideway' (optionally '_converging'/'_diverging'),
    'rising_parallel'/'rising_converging'/'rising_diverging',
    'falling_parallel'/'falling_converging'/'falling_diverging'.

    Parameters:
        segment: segment dict from the segmentation pass above; needs
            start/end closes, Bollinger maxima/minima close arrays, and the
            Bollinger linregress results.
        segment_consolidate_slope_ratio_threshold: max relative difference
            between upper/lower band slopes for the wedge to count as parallel.
        sideway_price_condition_threshold: max relative start/end close move
            for the segment to count as sideway.
    """
    start_close = segment['start_close']
    end_close = segment['end_close']
    maxima_close_boillenger = segment['maxima_close_boillenger']
    minima_close_boillenger = segment['minima_close_boillenger']
    start_height : float = maxima_close_boillenger[0] - minima_close_boillenger[0]
    end_height : float = maxima_close_boillenger[-1] - minima_close_boillenger[-1]
    upper_slope = segment['maxima_linregress_boillenger'].slope
    lower_slope = segment['minima_linregress_boillenger'].slope

    # BUGFIX: the parallel test was inverted ('> threshold'). Two band lines
    # are parallel when their slope ratio is CLOSE to 1 — the same
    # '< segment_consolidate_slope_ratio_threshold' criterion used when
    # consolidating adjacent parallel segments above. Also guard a zero
    # lower slope to avoid ZeroDivisionError.
    if lower_slope == 0:
        is_parallel : bool = (upper_slope == 0)
    else:
        is_parallel = abs((upper_slope/lower_slope) - 1) < segment_consolidate_slope_ratio_threshold
    is_rising : bool = end_close > start_close
    is_sideway : bool = abs((start_close/end_close) - 1) < sideway_price_condition_threshold

    # Converging/diverging: one end of the band channel more than 2x taller
    # than the other.
    is_converging : bool = start_height > end_height and start_height/end_height > 2
    is_diverging : bool = end_height > start_height and end_height/start_height > 2

    if is_sideway:
        segment['class'] = 'sideway'
        if is_converging:
            segment['class'] = 'sideway_converging'
        elif is_diverging:
            segment['class'] = 'sideway_diverging'

    else:
        if is_rising:
            if is_parallel:
                segment['class'] = 'rising_parallel'
            else:
                if is_converging:
                    segment['class'] = 'rising_converging'
                elif is_diverging:
                    segment['class'] = 'rising_diverging'
                else:
                    # Neither strongly converging nor diverging: treat as parallel.
                    segment['class'] = 'rising_parallel'
        else:
            if is_parallel:
                segment['class'] = 'falling_parallel'
            else:
                if is_converging:
                    segment['class'] = 'falling_converging'
                elif is_diverging:
                    segment['class'] = 'falling_diverging'
                else:
                    segment['class'] = 'falling_parallel'
|
+
|
|
785
|
+
for segment in consolidated_segements:
|
|
786
|
+
classify_segment(segment, segment_consolidate_slope_ratio_threshold, sideway_price_condition_threshold)
|
|
787
|
+
|
|
788
|
+
return {
|
|
789
|
+
'minima' : minima,
|
|
790
|
+
'maxima' : maxima,
|
|
791
|
+
'segments' : consolidated_segements
|
|
792
|
+
}
|