siglab-py 0.5.66__py3-none-any.whl → 0.6.18__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


@@ -0,0 +1,2371 @@
1
+ # type: ignore
2
+ import os
3
+ import logging
4
+ import argparse
5
+ import arrow
6
+ from datetime import datetime, timedelta, timezone
7
+ import time
8
+ from typing import List, Dict, Any, Union, Callable
9
+ import uuid
10
+ import math
11
+ import json
12
+ import inspect
13
+ import pandas as pd
14
+ import matplotlib.pyplot as plt
15
+ import matplotlib.dates as mdates
16
+
17
+ from ccxt.base.exchange import Exchange
18
+
19
+ from siglab_py.util.retry_util import retry
20
+ from siglab_py.util.market_data_util import fetch_candles, fix_column_types, fetch_historical_price, timestamp_to_week_of_month
21
+ from siglab_py.util.trading_util import calc_eff_trailing_sl
22
+ from siglab_py.util.analytic_util import compute_candles_stats, lookup_fib_target, partition_sliding_window
23
+ from siglab_py.util.simple_math import bucket_series, bucketize_val
24
+
25
+ def get_logger(report_name : str):
26
+ logging.Formatter.converter = time.gmtime
27
+ logger = logging.getLogger(report_name)
28
+ log_level = logging.INFO # DEBUG --> INFO --> WARNING --> ERROR
29
+ logger.setLevel(log_level)
30
+ format_str = '%(asctime)s %(message)s'
31
+ formatter = logging.Formatter(format_str)
32
+
33
+ sh = logging.StreamHandler()
34
+ sh.setLevel(log_level)
35
+ sh.setFormatter(formatter)
36
+ logger.addHandler(sh)
37
+ fh = logging.FileHandler(f"{report_name}.log", mode='w')
38
+ fh.setLevel(log_level)
39
+ fh.setFormatter(formatter)
40
+ logger.addHandler(fh)
41
+
42
+ return logger
43
+
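+ # spawn_parameters expands a flattened parameter dict into one algo_param dict per back-test scenario:
+ # scalar entries are copied onto every scenario; most list-valued "*_values" entries fan the scenarios out
+ # (the "_values" suffix is dropped, 'start_dates' becomes 'start_date'), while a few structured lists such as
+ # hi_how_many_candles_values map onto dedicated hi_/lo_ fields instead. The first key must be pypy_compat.
+ # Illustrative (hypothetical) input: {'pypy_compat': False, 'start_dates': ['2024-01-01'], 'sl_percent_values': [1, 2]}
+ # would yield two scenarios differing only in sl_percent, each tagged with a param_id and a name.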
44
+ def spawn_parameters(
45
+ flattened_parameters : Dict[str, Any]
46
+ ) -> List[Dict[str, Any]]:
47
+ algo_params : List[Dict[str, Any]] = []
48
+ for key in flattened_parameters:
49
+ _key = key.lower()
50
+ if _key in [ 'exchanges' ]:
51
+ continue
52
+
53
+ val = flattened_parameters[key]
54
+ if not algo_params:
55
+ assert(_key=="pypy_compat")
56
+ param_dict = {_key : val}
57
+ algo_params.append(param_dict)
58
+
59
+ else:
60
+ cloned_algo_params = None
61
+
62
+ for existing_algo_param in algo_params:
63
+ if type(val) not in [list, List]:
64
+ existing_algo_param[_key] = val
65
+ else:
66
+ if _key == 'hi_how_many_candles_values':
67
+ for x in val:
68
+ existing_algo_param['hi_stats_computed_over_how_many_candles'] = x[1]
69
+ existing_algo_param['hi_candle_size'] = x[0]
70
+ existing_algo_param['hi_how_many_candles'] = x[2]
71
+ elif _key == 'hi_ma_short_vs_long_interval_values':
72
+ for x in val:
73
+ existing_algo_param['hi_ma_short_interval'] = x[0]
74
+ existing_algo_param['hi_ma_long_interval'] = x[1]
75
+
76
+ elif _key == 'lo_how_many_candles_values':
77
+ for x in val:
78
+ existing_algo_param['lo_stats_computed_over_how_many_candles'] = x[1]
79
+ existing_algo_param['lo_candle_size'] = x[0]
80
+ existing_algo_param['lo_how_many_candles'] = x[2]
81
+
82
+ elif _key == 'lo_ma_short_vs_long_interval_values':
83
+ for x in val:
84
+ existing_algo_param['lo_ma_short_interval'] = x[0]
85
+ existing_algo_param['lo_ma_long_interval'] = x[1]
86
+
87
+ elif _key in [ 'white_list_tickers', 'additional_trade_fields', 'cautious_dayofweek', 'allow_entry_dayofweek', 'mapped_event_codes', 'ecoevents_mapped_regions' ]:
88
+ existing_algo_param[_key] = val
89
+
90
+ else:
91
+ if len(val)>1:
92
+ cloned_algo_params = []
93
+
94
+ if _key not in [ 'start_dates']:
95
+ _key = _key.replace("_values","")
96
+ elif _key == 'start_dates':
97
+ _key = 'start_date'
98
+
99
+ i = 0
100
+ for x in val:
101
+
102
+ if i==0:
103
+ existing_algo_param[_key] = x
104
+ else:
105
+ cloned_algo_param = existing_algo_param.copy()
106
+ cloned_algo_param[_key] = x
107
+ cloned_algo_params.append(cloned_algo_param)
108
+ i+=1
109
+
110
+ if cloned_algo_params:
111
+ algo_params = algo_params + cloned_algo_params
112
+ cloned_algo_params.clear()
113
+ cloned_algo_params = None
114
+
115
+ param_id : int = 0
116
+ for algo_param in algo_params:
117
+ start_date = algo_param.pop('start_date')
118
+ name_exclude_start_date = ""
119
+ for key in algo_param:
120
+ name_exclude_start_date += f"{key}: {algo_param[key]}|"
121
+ name = "start_date: {start_date}|" + name_exclude_start_date
122
+ algo_param['param_id'] = param_id
123
+ algo_param['start_date'] = start_date
124
+ algo_param['name'] = name
125
+ algo_param['name_exclude_start_date'] = name_exclude_start_date
126
+
127
+ # Purpose is to avoid a snowball effect in equity curves in long-dated back tests.
128
+ if 'constant_order_notional' not in algo_param:
129
+ algo_param['constant_order_notional'] = True
130
+ algo_param['target_order_notional'] = None
131
+ if algo_param['constant_order_notional']:
132
+ algo_param['target_order_notional'] = algo_param['initial_cash'] * algo_param['entry_percent_initial_cash']/100
133
+
134
+ param_id+=1
135
+ return algo_params
136
+
137
+ def create_plot_canvas(key : str, pd_hi_candles : pd.DataFrame, pd_lo_candles : pd.DataFrame):
138
+ SMALL_SIZE = 7
139
+ DATE_FORMAT = '%Y-%m-%d %H:%M:%S'
140
+
141
+ plt.rc('figure', figsize=(25, 25))
142
+ plt.ion()
143
+ plt.rc('font', size=SMALL_SIZE)
144
+ plt.rc('axes', titlesize=SMALL_SIZE)
145
+ plt.rc('axes', labelsize=SMALL_SIZE)
146
+ plt.rc('xtick', labelsize=SMALL_SIZE)
147
+ plt.rc('ytick', labelsize=SMALL_SIZE)
148
+ plt.rc('legend', fontsize=SMALL_SIZE)
149
+ plt.rc('figure', titlesize=SMALL_SIZE)
150
+
151
+ fig, axes = plt.subplots(5, 1, gridspec_kw={'height_ratios': [3, 1, 1, 1, 1]})
152
+
153
+ date_numbers_hi = mdates.date2num(pd_hi_candles['datetime'])
154
+ date_numbers_lo = mdates.date2num(pd_lo_candles['datetime'])
155
+ major_locator = mdates.AutoDateLocator(minticks=3, maxticks=7)
156
+
157
+ for ax in axes:
158
+ ax.set_xticklabels([])
159
+ ax.minorticks_on()
160
+ ax.grid()
161
+ ax.xaxis.set_major_locator(major_locator)
162
+ ax.xaxis.set_major_formatter(mdates.DateFormatter(DATE_FORMAT))
163
+ ax.tick_params(axis="x", which='major', labelbottom=True, rotation=45)
164
+
165
+ time_series_canvas = axes[0]
166
+ time_series_canvas.minorticks_on()
167
+ time_series_canvas.grid()
168
+ time_series_canvas.set_ylabel(f'Close px and boillenger band {key}')
169
+ time_series_canvas.tick_params(axis="x", which='major', labelbottom=True, rotation=45)
170
+ time_series_canvas.plot(date_numbers_lo, pd_lo_candles['close'], color='darkblue', linewidth=2, label=f"close")
171
+ time_series_canvas.plot(date_numbers_hi, pd_hi_candles['boillenger_upper'], color='lightblue', linestyle='--', linewidth=0.5, label=f"upper boillenger (hi)")
172
+ time_series_canvas.plot(date_numbers_hi, pd_hi_candles['boillenger_lower'], color='lightblue', linestyle='--', linewidth=0.5, label=f"lower boillenger (hi)")
173
+ time_series_canvas.plot(date_numbers_lo, pd_lo_candles['boillenger_upper'], color='gray', linestyle='-', linewidth=1, label=f"upper boillenger (lo)")
174
+ time_series_canvas.plot(date_numbers_lo, pd_lo_candles['boillenger_lower'], color='gray', linestyle='-', linewidth=1, label=f"lower boillenger (lo)")
175
+ time_series_canvas.legend()
176
+
177
+ boillenger_channel_height_canvas = axes[1]
178
+ boillenger_channel_height_canvas.minorticks_on()
179
+ boillenger_channel_height_canvas.grid()
180
+ boillenger_channel_height_canvas.set_ylabel(f'boillenger channel height vs ATR band {key}')
181
+ boillenger_channel_height_canvas.tick_params(axis="x", which='major', labelbottom=True, rotation=45)
182
+ boillenger_channel_height_canvas.plot(date_numbers_hi, pd_hi_candles['boillenger_channel_height'], color='lightblue', linewidth=0.5, label=f"boillenger channel height (hi)")
183
+ boillenger_channel_height_canvas.plot(date_numbers_hi, pd_hi_candles['atr'], color='lightblue', linestyle='dashed', linewidth=0.5, label=f"ATR (hi)")
184
+ boillenger_channel_height_canvas.plot(date_numbers_lo, pd_lo_candles['boillenger_channel_height'], color='gray', linewidth=0.5, label=f"boillenger channel height (lo)")
185
+ boillenger_channel_height_canvas.plot(date_numbers_lo, pd_lo_candles['atr'], color='gray', linestyle='dashed', linewidth=0.5, label=f"ATR (lo)")
186
+ boillenger_channel_height_canvas.legend()
187
+
188
+ rsi_canvas = axes[2]
189
+ rsi_canvas.minorticks_on()
190
+ rsi_canvas.grid()
191
+ rsi_canvas.set_ylabel(f'RSI {key}')
192
+ rsi_canvas.tick_params(axis="x", which='major', labelbottom=True, rotation=45)
193
+ rsi_canvas.plot(date_numbers_hi, pd_hi_candles['rsi'], color='lightblue', linewidth=2, label=f"RSI (hi)")
194
+ rsi_canvas.plot(date_numbers_lo, pd_lo_candles['rsi'], color='gray', linestyle='dashed', linewidth=2, label=f"RSI (lo)")
195
+
196
+ macd_canvas_hi = axes[3]
197
+ macd_canvas_hi.minorticks_on()
198
+ macd_canvas_hi.grid()
199
+ macd_canvas_hi.set_ylabel(f'MACD hi {key}')
200
+ macd_canvas_hi.tick_params(axis="x", which='major', labelbottom=True, rotation=45)
201
+ macd_canvas_hi.plot(date_numbers_hi, pd_hi_candles['macd'], color='lightblue', linewidth=0.5, label=f"MACD (hi)")
202
+ macd_canvas_hi.plot(date_numbers_hi, pd_hi_candles['signal'], color='lightblue', linewidth=0.5, label=f"signal (hi)")
203
+ bar_colors = ['red' if value < 0 else 'green' for value in pd_hi_candles['macd_minus_signal']]
204
+ macd_canvas_hi.bar(date_numbers_hi, pd_hi_candles['macd_minus_signal'], width=0.005, color=bar_colors, label="MACD Histogram (hi)")
205
+
206
+ macd_canvas_lo = axes[4]
207
+ macd_canvas_lo.minorticks_on()
208
+ macd_canvas_lo.grid()
209
+ macd_canvas_lo.set_ylabel(f'MACD lo {key}')
210
+ macd_canvas_lo.tick_params(axis="x", which='major', labelbottom=True, rotation=45)
211
+ macd_canvas_lo.plot(date_numbers_lo, pd_lo_candles['macd'], color='gray', linewidth=0.5, label=f"MACD (lo)")
212
+ macd_canvas_lo.plot(date_numbers_lo, pd_lo_candles['signal'], color='gray', linewidth=0.5, label=f"signal (lo)")
213
+ bar_colors_lo = ['red' if value < 0 else 'green' for value in pd_lo_candles['macd_minus_signal']]
214
+ macd_canvas_lo.bar(date_numbers_lo, pd_lo_candles['macd_minus_signal'], width=0.005, color=bar_colors_lo, label="MACD Histogram (lo)")
215
+
216
+ return {
217
+ 'plt' : plt,
218
+ 'time_series_canvas' : time_series_canvas
219
+ }
220
+
221
+ def plot_segments(
222
+ pd_candles : pd.DataFrame,
223
+ ts_partitions : Dict,
224
+ jpg_filename : Union[str, None] = None
225
+ ):
226
+ import matplotlib.pyplot as plt
227
+ import matplotlib.gridspec as gridspec
228
+
229
+ minima = ts_partitions['minima']
230
+ maxima = ts_partitions['maxima']
231
+ segments = ts_partitions['segments']
232
+
233
+ fig = plt.figure(figsize=(15, 8), facecolor='black')
234
+ gs = gridspec.GridSpec(1, 1, height_ratios=[1])
235
+
236
+ # Price Chart
237
+ ax0 = plt.subplot(gs[0])
238
+ ax0.plot(pd_candles['datetime'], pd_candles['close'], label='Close', color='dodgerblue')
239
+ ax0.plot(pd_candles['datetime'], pd_candles['smoothed_close'], label='Smoothed Close', color='yellow')
240
+ ax0.plot(pd_candles['datetime'], pd_candles['ema_close'], label='3m EMA', linestyle='--', color='orange')
241
+ ax0.fill_between(pd_candles['datetime'], pd_candles['close'], pd_candles['ema_close'], where=(pd_candles['close'] > pd_candles['ema_close']), interpolate=True, color='dodgerblue', alpha=0.3, label='Bull Market')
242
+ ax0.fill_between(pd_candles['datetime'], pd_candles['close'], pd_candles['ema_close'], where=(pd_candles['close'] <= pd_candles['ema_close']), interpolate=True, color='red', alpha=0.3, label='Bear Market')
243
+
244
+ ax0.set_title('Close vs EMA', color='white')
245
+ ax0.set_xlabel('Date', color='white')
246
+ ax0.set_ylabel('Price', color='white')
247
+ legend = ax0.legend()
248
+ legend.get_frame().set_facecolor('black')
249
+ legend.get_frame().set_edgecolor('white')
250
+ for text in legend.get_texts():
251
+ text.set_color('white')
252
+
253
+ # @CRITICAL close vs smoothed_close and merge_distance
254
+ for maxima_index in maxima:
255
+ ax0.plot(pd_candles['datetime'][maxima_index], pd_candles['close'][maxima_index], marker='+', markersize=8, color='yellow', label='maxima')
256
+ for minima_index in minima:
257
+ ax0.plot(pd_candles['datetime'][minima_index], pd_candles['close'][minima_index], marker='o', markersize=5, color='yellow', label='minima')
258
+
259
+ for segment in segments:
260
+
261
+ ax0.axvline(x=pd_candles['datetime'][segment['end']], color='gray', linewidth=2, linestyle='--')
262
+
263
+ if 'maxima_idx_boillenger' in segment and segment['maxima_linregress_boillenger'] is not None:
264
+ '''
265
+ We don't need to compute y_series like this:
266
+ slope_maxima = segment['maxima_linregress_boillenger'].slope
267
+ intercept_maxima = segment['maxima_linregress_boillenger'].intercept
268
+ segment_maxima_dates = pd_candles['datetime'][segment['maxima_idx_boillenger']]
269
+ y_series = [ slope_maxima * idx + intercept_maxima for idx in segment['maxima_idx_boillenger'] ]
270
+ The syntax above is kept for reference only.
271
+ '''
272
+ x_series = [pd_candles.loc[idx]['datetime'] for idx in segment['maxima_idx_boillenger'] if idx in pd_candles.index] # x = dates
273
+ y_series = [segment['maxima_close_boillenger'][i] for i, idx in enumerate(segment['maxima_idx_boillenger']) if idx in pd_candles.index] # y = boillenger upper
274
+ ax0.plot(
275
+ x_series,
276
+ y_series,
277
+ color='green', linestyle='--', label='Maxima Linear Regression')
278
+
279
+ if 'minima_idx_boillenger' in segment and segment['minima_linregress_boillenger'] is not None:
280
+ x_series = [pd_candles.loc[idx]['datetime'] for idx in segment['minima_idx_boillenger'] if idx in pd_candles.index] # x = dates
281
+ y_series = [segment['minima_close_boillenger'][i] for i, idx in enumerate(segment['minima_idx_boillenger']) if idx in pd_candles.index] # y = boillenger lower
282
+ ax0.plot(
283
+ x_series,
284
+ y_series,
285
+ color='red', linestyle='--', label='Minima Linear Regression')
286
+
287
+ ax0.set_facecolor('black')
288
+
289
+ ax0.tick_params(axis='x', colors='white')
290
+ ax0.tick_params(axis='y', colors='white')
291
+
292
+ # Show the plot
293
+ plt.grid(True)
294
+ plt.tight_layout()
295
+
296
+ if jpg_filename:
297
+ plt.savefig(jpg_filename, format='jpg', dpi=300)
298
+
299
+ def segments_to_df(segments : List[Dict]) -> pd.DataFrame:
300
+ segments = [
301
+ {
302
+ 'start' : segment['start'],
303
+ 'end' : segment['end'],
304
+ 'start_datetime' : segment['start_datetime'] if not type(segment['start_datetime']) is str else arrow.get(segment['start_datetime']).datetime.replace(tzinfo=None),
305
+ 'end_datetime' : segment['end_datetime'] if not type(segment['end_datetime']) is str else arrow.get(segment['end_datetime']).datetime.replace(tzinfo=None),
306
+ 'start_close' : segment['start_close'],
307
+ 'end_close' : segment['end_close'],
308
+ 'window_size_num_intervals' : segment['window_size_num_intervals'],
309
+ 'cur_recur_depth' : segment['cur_recur_depth'],
310
+ 'up_or_down' : segment['up_or_down'],
311
+ 'class' : segment['class'],
312
+ 'maxima_linregress_slope' : segment['maxima_linregress_full'].slope,
313
+ 'maxima_linregress_intercept' : segment['maxima_linregress_full'].intercept,
314
+ 'maxima_linregress_std_err' : segment['maxima_linregress_full'].stderr,
315
+ 'minima_linregress_slope' : segment['minima_linregress_full'].slope,
316
+ 'minima_linregress_intercept' : segment['minima_linregress_full'].intercept,
317
+ 'minima_linregress_std_err' : segment['minima_linregress_full'].stderr
318
+
319
+ }
320
+ for segment in segments ]
321
+ for segment in segments:
322
+ segment['start_ts'] = int(segment['start_datetime'].timestamp())
323
+ segment['end_ts'] = int(segment['end_datetime'].timestamp())
324
+ pd_segments = pd.DataFrame(segments)
325
+ return pd_segments
326
+
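+ # Illustrative (hypothetical) signal_thresholds entry: {'lhs': 'rsi', 'op': '>', 'rhs': 'rsi_lower_band'}
+ # evaluates this_candle['rsi'] > this_candle['rsi_lower_band'] + adj_bps/10000; any op other than '>' is
+ # treated as '<'. All entries must pass for the function to return True.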
327
+ def generic_check_signal_thresholds(
328
+ signal_thresholds : List[Dict[str, Any]],
329
+ this_candle : Dict[str, Any],
330
+ adj_bps : float = 0
331
+ ) -> bool:
332
+ '''
333
+ WARNING!!! Do not put any strategy-specific logic here!!!
334
+ Thanks.
335
+ '''
336
+ return all([
337
+ this_candle[signal['lhs']] > (this_candle[signal['rhs']] + adj_bps/10000)
338
+ if signal['op'] == '>'
339
+ else this_candle[signal['lhs']] < (this_candle[signal['rhs']] + adj_bps/10000)
340
+ for signal in signal_thresholds
341
+ ])
342
+
343
+ def generic_pnl_eval (
344
+ this_candle,
345
+ running_sl_percent_hard : float,
346
+ this_ticker_open_trades : List[Dict],
347
+ algo_param : Dict,
348
+ long_tp_indicator_name : Union[str, None] = None,
349
+ short_tp_indicator_name : Union[str, None] = None
350
+ ) -> Dict[str, float]:
351
+ '''
352
+ WARNING!!! Do not put any strategy-specific logic here!!!
353
+ Thanks.
354
+ '''
355
+ unrealized_pnl_interval, unrealized_pnl_open, unrealized_pnl_live_optimistic, unrealized_pnl_live_pessimistic, unrealized_pnl_tp, unrealized_pnl_sl, unrealized_pnl_close_approx = 0, 0, 0, 0, 0, 0, 0
356
+ assert(len(set([ trade['side'] for trade in this_ticker_open_trades]))==1) # open trades should be in same direction
357
+ this_ticker_open_positions_side = this_ticker_open_trades[-1]['side']
358
+
359
+ lo_dayofweek = this_candle['dayofweek']
360
+ cautious_dayofweek : List[int] = algo_param['cautious_dayofweek']
361
+
362
+ lo_close = this_candle['close']
363
+ lo_open = this_candle['open']
364
+ lo_high = this_candle['high']
365
+ lo_low = this_candle['low']
366
+
367
+ # ATR, Fib 0.618 and Bollinger levels are price levels. RSI/MFI etc. are not prices. Be careful.
368
+ long_tp_price = this_candle[long_tp_indicator_name] if long_tp_indicator_name else None
369
+ short_tp_price = this_candle[short_tp_indicator_name] if short_tp_indicator_name else None
370
+
371
+ _asymmetric_tp_bps = algo_param['asymmetric_tp_bps'] if lo_dayofweek in cautious_dayofweek else 0
372
+
373
+ for trade in this_ticker_open_trades:
374
+ target_price = trade['target_price'] if 'target_price' in trade else None
375
+ if not long_tp_indicator_name and not short_tp_indicator_name:
376
+ assert(target_price)
377
+
378
+ if this_ticker_open_positions_side=='buy':
379
+ unrealized_pnl_interval += (lo_close - trade['entry_price']) * trade['size']
380
+ unrealized_pnl_open += (lo_open - trade['entry_price']) * trade['size']
381
+ unrealized_pnl_live_optimistic += (lo_high - trade['entry_price']) * trade['size']
382
+ unrealized_pnl_live_pessimistic += (lo_low - trade['entry_price']) * trade['size']
383
+ unrealized_pnl_close_approx += (min(lo_close*(1+_asymmetric_tp_bps/10000), lo_high) - trade['entry_price']) * trade['size'] # Less accurate to use close price
384
+ if (
385
+ long_tp_indicator_name
386
+ and not target_price # If entry trades are tagged target_price, it should take precedence over indicator
387
+ ):
388
+ unrealized_pnl_tp += (min(long_tp_price*(1+_asymmetric_tp_bps/10000), lo_high) - trade['entry_price']) * trade['size']
389
+ else:
390
+ if target_price:
391
+ if (lo_high>target_price and lo_low<target_price):
392
+ unrealized_pnl_tp += (target_price - trade['entry_price']) * trade['size']
393
+ else:
394
+ unrealized_pnl_tp += unrealized_pnl_close_approx # This is worst, try not to estimate pnl with close price!
395
+ else:
396
+ unrealized_pnl_tp += unrealized_pnl_close_approx # This is worst, try not to estimate pnl with close price!
397
+ unrealized_pnl_sl += -1 * (trade['entry_price'] * trade['size'] * (running_sl_percent_hard/100))
398
+
399
+ else:
400
+ unrealized_pnl_interval += (trade['entry_price'] - lo_close) * trade['size']
401
+ unrealized_pnl_open += (trade['entry_price'] - lo_open) * trade['size']
402
+ unrealized_pnl_live_optimistic += (trade['entry_price'] - lo_low) * trade['size']
403
+ unrealized_pnl_live_pessimistic += (trade['entry_price'] - lo_high) * trade['size']
404
+ unrealized_pnl_close_approx += (trade['entry_price'] - max(lo_close*(1-_asymmetric_tp_bps/10000), lo_low)) * trade['size']
405
+ if (
406
+ short_tp_indicator_name
407
+ and not target_price # If entry trades are tagged target_price, it should take precedence over indicator
408
+ ):
409
+ unrealized_pnl_tp += (trade['entry_price'] - max(short_tp_price*(1-_asymmetric_tp_bps/10000), lo_low)) * trade['size']
410
+ else:
411
+ if target_price:
412
+ if (lo_high>target_price and lo_low<target_price):
413
+ unrealized_pnl_tp += (trade['entry_price'] - target_price) * trade['size']
414
+ else:
415
+ unrealized_pnl_tp += unrealized_pnl_close_approx # This is worst, try not to estimate pnl with close price!
416
+ else:
417
+ unrealized_pnl_tp += unrealized_pnl_close_approx # This is worst, try not to estimate pnl with close price!
418
+
419
+ unrealized_pnl_sl += -1 * (trade['entry_price'] * trade['size'] * (running_sl_percent_hard/100))
420
+
421
+ return {
422
+ 'unrealized_pnl_interval' : unrealized_pnl_interval,
423
+ 'unrealized_pnl_open' : unrealized_pnl_open,
424
+ 'unrealized_pnl_live_optimistic' : unrealized_pnl_live_optimistic,
425
+ 'unrealized_pnl_live_pessimistic' : unrealized_pnl_live_pessimistic,
426
+ 'unrealized_pnl_tp' : unrealized_pnl_tp,
427
+ 'unrealized_pnl_sl' : unrealized_pnl_sl
428
+ }
429
+
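+ # generic_tp_eval returns True if any open trade's target_price lies within this candle's [low, high]
+ # range, i.e. the take-profit level traded during the interval.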
430
+ def generic_tp_eval (
431
+ lo_row,
432
+ this_ticker_open_trades : List[Dict]
433
+ ) -> bool:
434
+ low : float = lo_row['low']
435
+ high : float = lo_row['high']
436
+
437
+ for trade in this_ticker_open_trades:
438
+ if trade['target_price']<=high and trade['target_price']>=low:
439
+ return True
440
+ return False
441
+
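+ # Default universe selection: sort tickers alphabetically, then truncate to max_num_tickers. Strategies
+ # are expected to pass their own sort_filter_universe_func (same signature) to run_scenario instead.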
442
+ def generic_sort_filter_universe(
443
+ tickers : List[str],
444
+ exchange : Exchange,
445
+
446
+ # Use "i" (row index) to find current/last interval's market data or TAs from "all_exchange_candles"
447
+ i,
448
+ all_exchange_candles : Dict[str, Dict[str, Dict[str, pd.DataFrame]]],
449
+
450
+ max_num_tickers : int = 10
451
+ ) -> List[str]:
452
+ if not tickers:
453
+ return None
454
+
455
+ sorted_filtered_tickers : List[str] = tickers.copy()
456
+
457
+ # Custom strategy specific sort logic here. Sort first before you filter!
458
+ sorted_filtered_tickers.sort()
459
+
460
+ # Custom filtering logic
461
+ if len(sorted_filtered_tickers)>max_num_tickers:
462
+ sorted_filtered_tickers = sorted_filtered_tickers[:max_num_tickers]
463
+
464
+ return sorted_filtered_tickers
465
+
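+ # fetch_price first looks up pd_reference_price_cache by timestamp_ms; on a miss it calls
+ # fetch_historical_price and appends the fetched row to the cache DataFrame in place.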
466
+ @retry(num_attempts=3)
467
+ def fetch_price(
468
+ exchange,
469
+ normalized_symbol : str,
470
+ pd_reference_price_cache : pd.DataFrame,
471
+ timestamp_ms : int,
472
+ ref_timeframe : str = '1m'
473
+ ) -> float:
474
+ cached_row = pd_reference_price_cache[pd_reference_price_cache.timestamp_ms==timestamp_ms]
475
+ if cached_row.shape[0]>0:
476
+ reference_price = cached_row.iloc[-1]['price']
477
+ else:
478
+ reference_price = fetch_historical_price(
479
+ exchange=exchange,
480
+ normalized_symbol=normalized_symbol,
481
+ timestamp_ms=timestamp_ms,
482
+ ref_timeframe=ref_timeframe)
483
+ cached_row = {
484
+ 'exchange' : exchange,
485
+ 'ticker' : normalized_symbol,
486
+ 'datetime' : datetime.fromtimestamp(int(timestamp_ms/1000)),
487
+ 'datetime_utc' : datetime.fromtimestamp(int(timestamp_ms/1000), tz=timezone.utc),
488
+ 'timestamp_ms' : timestamp_ms,
489
+ 'price' : reference_price
490
+ }
491
+ # pd_reference_price_cache = pd.concat([pd_reference_price_cache, pd.DataFrame([cached_row])], axis=0, ignore_index=True)
492
+ pd_reference_price_cache.loc[len(pd_reference_price_cache)] = cached_row
493
+ return reference_price
494
+
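+ # fetch_cycle_ath_atl pages through exchange.fetch_ohlcv (since=start_ts, limit=100) until end_ts and
+ # returns the max high / min low seen. Note: the except branch does not advance start_ts, so a
+ # persistently failing fetch keeps retrying the same window.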
495
+ def fetch_cycle_ath_atl(
496
+ exchange,
497
+ symbol,
498
+ timeframe,
499
+ start_date : datetime,
500
+ end_date : datetime
501
+ ):
502
+ ath = float('-inf')
503
+ atl = float('inf')
504
+ all_ohlcv = []
505
+
506
+ start_ts = int(start_date.timestamp() * 1000)
507
+ end_ts = int(end_date.timestamp() * 1000)
508
+
509
+ while start_ts < end_ts:
510
+ try:
511
+ ohlcv = exchange.fetch_ohlcv(symbol, timeframe, since=start_ts, limit=100)
512
+ if not ohlcv:
513
+ break
514
+ all_ohlcv.extend(ohlcv)
515
+ start_ts = ohlcv[-1][0] + 1
516
+ time.sleep(0.1)
517
+ except Exception as e:
518
+ print(f"fetch_cycle_ath_atl Oops: {e}")
519
+
520
+ for candle in all_ohlcv:
521
+ high = candle[2]
522
+ low = candle[3]
523
+ ath = max(ath, high)
524
+ atl = min(atl, low)
525
+
526
+ return {
527
+ 'ath' : ath,
528
+ 'atl' : atl
529
+ }
530
+
531
+ '''
532
+ ******** THE_LOOP ********
533
+
534
+ This is the loop which replays candles for back tests. No STRATEGY_SPECIFIC logic should be here!!!
535
+ '''
536
+ def run_scenario(
537
+ algo_param : Dict,
538
+ exchanges : List[Exchange],
539
+ all_exchange_candles : Dict[str, Dict[str, Dict[str, pd.DataFrame]]],
540
+ pd_ref_candles_fast : pd.DataFrame,
541
+ pd_ref_candles_slow : pd.DataFrame,
542
+ tickers : List[str],
543
+ ref_candles_partitions : Dict,
544
+ pd_hi_candles_partitions : pd.DataFrame,
545
+ pd_lo_candles_partitions : pd.DataFrame,
546
+ economic_calendars_loaded : bool,
547
+ pd_economic_calendars : pd.DataFrame,
548
+
549
+ order_notional_adj_func : Callable[..., float],
550
+ allow_entry_initial_func : Callable[..., bool],
551
+ allow_entry_final_func : Callable[..., bool],
552
+ allow_slice_entry_func : Callable[..., bool],
553
+ sl_adj_func : Callable[..., Dict[str, float]],
554
+ trailing_stop_threshold_eval_func : Callable[..., Dict[str, float]],
555
+ pnl_eval_func : Callable[..., Dict[str, float]],
556
+ tp_eval_func : Callable[..., bool],
557
+ sort_filter_universe_func : Callable[..., List[str]],
558
+
559
+ logger,
560
+
561
+ pypy_compat : bool = False,
562
+ plot_timeseries : bool = True,
563
+ ):
564
+ exceptions : Dict = {}
565
+
566
+ if not pypy_compat:
567
+ pd_ref_candles_segments = segments_to_df(ref_candles_partitions['segments'])
568
+ pd_hi_candles_segments = segments_to_df(pd_hi_candles_partitions['segments'])
569
+ pd_lo_candles_segments = segments_to_df(pd_lo_candles_partitions['segments'])
570
+
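+ # one_interval_ms is derived from the lo_candle_size suffix below (m=60_000, h=3_600_000, d=86_400_000 ms);
+ # min_sl_age_ms and the economic-event blocking window are both expressed as multiples of it.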
571
+ min_sl_age_ms : int = 0
572
+ if algo_param['lo_candle_size'][-1]=="m":
573
+ one_interval_ms = 60*1000
574
+ min_sl_age_ms = algo_param['sl_num_intervals_delay'] * one_interval_ms
575
+ num_intervals_block_pending_ecoevents_ms = one_interval_ms*algo_param['num_intervals_block_pending_ecoevents']
576
+ elif algo_param['lo_candle_size'][-1]=="h":
577
+ one_interval_ms = 60*60*1000
578
+ min_sl_age_ms = algo_param['sl_num_intervals_delay'] * one_interval_ms
579
+ num_intervals_block_pending_ecoevents_ms = one_interval_ms*algo_param['num_intervals_block_pending_ecoevents']
580
+ elif algo_param['lo_candle_size'][-1]=="d":
581
+ one_interval_ms = 60*60*24*1000
582
+ min_sl_age_ms = algo_param['sl_num_intervals_delay'] * one_interval_ms
583
+ num_intervals_block_pending_ecoevents_ms = one_interval_ms*algo_param['num_intervals_block_pending_ecoevents']
584
+
585
+ commission_bps = algo_param['commission_bps']
586
+
587
+ initial_cash : float = algo_param['initial_cash']
588
+ entry_percent_initial_cash : float = algo_param['entry_percent_initial_cash']
589
+ target_position_size_percent_total_equity : float = algo_param['target_position_size_percent_total_equity']
590
+
591
+ class GlobalState:
592
+ def __init__(self, initial_cash) -> None:
593
+ self.cash = initial_cash
594
+ self.total_equity = self.cash
595
+ self.total_commission = 0
596
+
597
+ gloabl_state = GlobalState(initial_cash=initial_cash) # This cash position is shared across all tickers in universe
598
+ current_position_usdt = 0
599
+
600
+ all_trades : List = []
601
+
602
+ compiled_candles_by_exchange_pairs : Dict[str, Dict[str, pd.DataFrame]] = {}
603
+ hi_num_intervals, lo_num_intervals = 99999999, 99999999
604
+
605
+ for exchange in exchanges:
606
+ for ticker in tickers:
607
+ key : str = f"{exchange.name}-{ticker}"
608
+ pd_hi_candles : pd.DataFrame = all_exchange_candles[exchange.name][ticker]['hi_candles']
609
+ pd_lo_candles : pd.DataFrame = all_exchange_candles[exchange.name][ticker]['lo_candles']
610
+
611
+ # market_data_gizmo sometimes inserts dummy row(s) between start_date and the actual first candle fetched
612
+ if pd_hi_candles[~pd_hi_candles.close.notna()].shape[0]>0:
613
+ pd_hi_candles.drop(pd_hi_candles[~pd_hi_candles.close.notna()].index[0], inplace=True)
614
+
615
+ hi_num_intervals = min(hi_num_intervals, pd_hi_candles.shape[0])
616
+ lo_num_intervals = min(lo_num_intervals, pd_lo_candles.shape[0])
617
+
618
+ compiled_candles_by_exchange_pairs[key] = {}
619
+ compiled_candles_by_exchange_pairs[key]['hi_candles'] = pd_hi_candles
620
+ compiled_candles_by_exchange_pairs[key]['lo_candles'] = pd_lo_candles
621
+
622
+ all_canvas = {}
623
+ if plot_timeseries:
624
+ for exchange in exchanges:
625
+ for ticker in tickers:
626
+ key = f"{exchange.name}-{ticker}"
627
+ pd_hi_candles = compiled_candles_by_exchange_pairs[key]['hi_candles']
628
+ pd_lo_candles = compiled_candles_by_exchange_pairs[key]['lo_candles']
629
+
630
+ canvas = create_plot_canvas(key, pd_hi_candles, pd_lo_candles)
631
+ all_canvas[f"{key}-param_id{algo_param['param_id']}"] = canvas
632
+
633
+ order_notional_adj_func_sig = inspect.signature(order_notional_adj_func)
634
+ order_notional_adj_func_params = order_notional_adj_func_sig.parameters.keys()
635
+ allow_entry_initial_func_sig = inspect.signature(allow_entry_initial_func)
636
+ allow_entry_initial_func_params = allow_entry_initial_func_sig.parameters.keys()
637
+ allow_entry_final_func_sig = inspect.signature(allow_entry_final_func)
638
+ allow_entry_final_func_params = allow_entry_final_func_sig.parameters.keys()
639
+ allow_slice_entry_func_sig = inspect.signature(allow_slice_entry_func)
640
+ allow_slice_entry_func_params = allow_slice_entry_func_sig.parameters.keys()
641
+ sl_adj_func_sig = inspect.signature(sl_adj_func)
642
+ sl_adj_func_params = sl_adj_func_sig.parameters.keys()
643
+ trailing_stop_threshold_eval_func_sig = inspect.signature(trailing_stop_threshold_eval_func)
644
+ trailing_stop_threshold_eval_func_params = trailing_stop_threshold_eval_func_sig.parameters.keys()
645
+ tp_eval_func_sig = inspect.signature(tp_eval_func)
646
+ tp_eval_func_params = tp_eval_func_sig.parameters.keys()
647
+ sort_filter_universe_func_sig = inspect.signature(sort_filter_universe_func)
648
+ sort_filter_universe_func_params = sort_filter_universe_func_sig.parameters.keys()
649
+
650
+ BUCKETS_m100_100 = bucket_series(
651
+ values=list(range(-100, 100)),
652
+ outlier_threshold_percent=10,
653
+ level_granularity=algo_param['default_level_granularity'] if 'default_level_granularity' in algo_param else 0.01
654
+ )
655
+
656
+ REFERENCE_PRICE_CACHE_COLUMNS = [
657
+ 'exchange', 'ticker', 'datetime', 'datetime_utc', 'timestamp_ms', 'price'
658
+ ]
659
+ reference_price_cache = {}
660
+
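+ # _max_camp collapses the three boolean camp flags into a single level: camp3 takes precedence over
+ # camp2, which takes precedence over camp1; 0 means no camp reached.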
661
+ def _max_camp(
662
+ camp1 : bool,
663
+ camp2 : bool,
664
+ camp3 : bool
665
+ ) -> int:
666
+ camp : int = 1 if camp1 else 0
667
+ if camp2:
668
+ camp = 2
669
+ if camp3:
670
+ camp =3
671
+ return camp
672
+ REVERSAL_CAMP_ITEM = {
673
+ 'camp1' : False,
674
+ 'camp2' : False,
675
+ 'camp3' : False,
676
+ 'camp1_price' : None,
677
+ 'camp2_price' : None,
678
+ 'camp3_price' : None,
679
+
680
+ 'datetime' : None # Last update
681
+ }
682
+ reversal_camp_cache = {}
683
+ lo_boillenger_lower_breached_cache = {}
684
+ lo_boillenger_upper_breached_cache = {}
685
+ ath, atl = None, None
686
+ target_order_notional = 0
687
+ for i in range(algo_param['how_many_last_candles'], lo_num_intervals):
688
+ for exchange in exchanges:
689
+
690
+ kwargs = {k: v for k, v in locals().items() if k in sort_filter_universe_func_params}
691
+ sorted_filtered_tickers = sort_filter_universe_func(**kwargs)
692
+
693
+ for ticker in sorted_filtered_tickers:
694
+ key = f"{exchange.name}-{ticker}"
695
+ if key not in reversal_camp_cache:
696
+ reversal_camp_cache[key] = REVERSAL_CAMP_ITEM.copy()
697
+
698
+ pd_reference_price_cache : pd.DataFrame = None
699
+ reference_price_cache_file : str = f"refpx_{ticker.replace('/','').replace(':','')}.csv"
700
+ if reference_price_cache_file not in reference_price_cache:
701
+ if os.path.isfile(reference_price_cache_file):
702
+ pd_reference_price_cache = pd.read_csv(reference_price_cache_file)
703
+ pd_reference_price_cache.drop(pd_reference_price_cache.columns[pd_reference_price_cache.columns.str.contains('unnamed',case = False)],axis = 1, inplace = True)
704
+ reference_price_cache[reference_price_cache_file] = pd_reference_price_cache
705
+ else:
706
+ pd_reference_price_cache = reference_price_cache[reference_price_cache_file]
707
+ if reference_price_cache_file not in reference_price_cache:
708
+ pd_reference_price_cache = pd.DataFrame(columns=REFERENCE_PRICE_CACHE_COLUMNS)
709
+ reference_price_cache[reference_price_cache_file] = pd_reference_price_cache
710
+
711
+ pd_candles = compiled_candles_by_exchange_pairs[key]
712
+ pd_hi_candles = pd_candles['hi_candles']
713
+ pd_lo_candles = pd_candles['lo_candles']
714
+
715
+ lo_row = pd_lo_candles.iloc[i]
716
+ lo_row_tm1 = pd_lo_candles.iloc[i-1]
717
+
718
+ lo_datetime = lo_row['datetime']
719
+ tm1 = lo_row_tm1['datetime']
720
+
721
+ lo_year = lo_row['year']
722
+ lo_month = lo_row['month']
723
+ lo_day = lo_row['day']
724
+ lo_hour = lo_row['hour']
725
+ lo_minute = lo_row['minute']
726
+ lo_timestamp_ms = lo_row['timestamp_ms']
727
+ lo_dayofweek = lo_row['dayofweek']
728
+ lo_open = lo_row['open']
729
+ lo_high = lo_row['high']
730
+ lo_low = lo_row['low']
731
+ lo_mid = (lo_high + lo_low)/2
732
+ lo_close = lo_row['close']
733
+ lo_candle_open_close = lo_open - lo_close
734
+ lo_candle_hi_lo = lo_high - lo_low
735
+ lo_volume = lo_row['volume']
736
+ lo_atr = lo_row['atr']
737
+ lo_rsi = lo_row['rsi']
738
+ lo_rsi_bucket = lo_row['rsi_bucket']
739
+ lo_rsi_trend = lo_row['rsi_trend']
740
+ lo_mfi = lo_row['mfi']
741
+ lo_mfi_bucket = lo_row['mfi_bucket']
742
+ lo_macd_minus_signal = lo_row['macd_minus_signal']
743
+ lo_boillenger_upper = lo_row['boillenger_upper']
744
+ lo_boillenger_lower = lo_row['boillenger_lower']
745
+ lo_boillenger_mid = (lo_boillenger_upper + lo_boillenger_lower) / 2
746
+ lo_boillenger_height = lo_boillenger_upper - lo_boillenger_lower
747
+ lo_boillenger_channel_height = lo_row['boillenger_channel_height']
748
+ lo_aggressive_up = lo_row['aggressive_up']
749
+ lo_aggressive_down = lo_row['aggressive_down']
750
+ lo_fvg_high = lo_row['fvg_high']
751
+ lo_fvg_low = lo_row['fvg_low']
752
+ lo_hurst_exp = lo_row['hurst_exp']
753
+ lo_ema_volume_short_periods = lo_row['ema_volume_short_periods']
754
+ lo_ema_short_slope = lo_row['ema_short_slope'] if 'ema_short_slope' in pd_lo_candles.columns else 0
755
+ lo_normalized_ema_short_slope = lo_row['normalized_ema_short_slope'] if 'normalized_ema_short_slope' in pd_lo_candles.columns else 0
756
+ lo_ema_long_slope = lo_row['ema_long_slope'] if 'ema_long_slope' in pd_lo_candles.columns else 0
757
+ lo_normalized_ema_long_slope = lo_row['normalized_ema_long_slope'] if 'normalized_ema_long_slope' in pd_lo_candles.columns else 0
758
+ lo_tm1_normalized_ema_long_slope = lo_row_tm1['normalized_ema_long_slope'] if 'normalized_ema_long_slope' in pd_lo_candles.columns else 0
759
+
760
+ lo_tm1_close = lo_row_tm1['close']
761
+ lo_tm1_rsi = lo_row_tm1['rsi']
762
+ lo_tm1_rsi_bucket = lo_row_tm1['rsi_bucket']
763
+ lo_tm1_rsi_trend = lo_row_tm1['rsi_trend']
764
+
765
+ lo_max_short_periods = lo_row['max_short_periods']
766
+ lo_idmax_short_periods = int(lo_row['idmax_short_periods']) if not math.isnan(lo_row['idmax_short_periods']) else None
767
+ lo_idmax_dt_short_periods = pd_lo_candles.at[lo_idmax_short_periods, 'datetime'] if not (lo_idmax_short_periods is None or pd.isna(lo_idmax_short_periods)) else None
768
+ lo_max_long_periods = lo_row['max_long_periods']
769
+ lo_idmax_long_periods = int(lo_row['idmax_long_periods']) if not math.isnan(lo_row['idmax_long_periods']) else None
770
+ lo_idmax_dt_long_periods = pd_lo_candles.at[lo_idmax_long_periods, 'datetime'] if not (lo_idmax_long_periods is None or pd.isna(lo_idmax_long_periods)) else None
771
+
772
+ lo_tm1_max_short_periods = lo_row_tm1['max_short_periods']
773
+ lo_tm1_idmax_short_periods = int(lo_row_tm1['idmax_short_periods']) if not math.isnan(lo_row_tm1['idmax_short_periods']) else None
774
+ lo_tm1_idmax_dt_short_periods = pd_lo_candles.at[lo_tm1_idmax_short_periods, 'datetime'] if not (lo_tm1_idmax_short_periods is None or pd.isna(lo_tm1_idmax_short_periods)) else None
775
+ lo_tm1_max_long_periods = lo_row_tm1['max_long_periods']
776
+ lo_tm1_idmax_long_periods = int(lo_row_tm1['idmax_long_periods']) if not math.isnan(lo_row_tm1['idmax_long_periods']) else None
777
+ lo_tm1_idmax_dt_long_periods = pd_lo_candles.at[lo_tm1_idmax_long_periods, 'datetime'] if not (lo_tm1_idmax_long_periods is None or pd.isna(lo_tm1_idmax_long_periods)) else None
778
+
779
+ lo_min_short_periods = lo_row['min_short_periods']
780
+ lo_idmin_short_periods = int(lo_row['idmin_short_periods']) if not math.isnan(lo_row['idmin_short_periods']) else None
781
+ lo_idmin_dt_short_periods = pd_lo_candles.at[lo_idmin_short_periods,'datetime'] if not (lo_idmin_short_periods is None or pd.isna(lo_idmin_short_periods)) else None
782
+ lo_min_long_periods = lo_row['min_long_periods']
783
+ lo_idmin_long_periods = int(lo_row['idmin_long_periods']) if not math.isnan(lo_row['idmin_long_periods']) else None
784
+ lo_idmin_dt_long_periods = pd_lo_candles.at[lo_idmin_long_periods,'datetime'] if not (lo_idmin_long_periods is None or pd.isna(lo_idmin_long_periods)) else None
785
+
786
+ lo_tm1_min_short_periods = lo_row_tm1['min_short_periods']
787
+ lo_tm1_idmin_short_periods = int(lo_row_tm1['idmin_short_periods']) if not math.isnan(lo_row_tm1['idmin_short_periods']) else None
788
+ lo_tm1_idmin_dt_short_periods = pd_lo_candles.at[lo_tm1_idmin_short_periods,'datetime'] if not (lo_tm1_idmin_short_periods is None or pd.isna(lo_tm1_idmin_short_periods)) else None
789
+ lo_tm1_min_long_periods = lo_row_tm1['min_long_periods']
790
+ lo_tm1_idmin_long_periods = int(lo_row_tm1['idmin_long_periods']) if not math.isnan(lo_row_tm1['idmin_long_periods']) else None
791
+ lo_tm1_idmin_dt_long_periods = pd_lo_candles.at[lo_tm1_idmin_long_periods,'datetime'] if not (lo_tm1_idmin_long_periods is None or pd.isna(lo_tm1_idmin_long_periods)) else None
792
+
793
+ if not ath or not atl:
794
+ ath_atl = fetch_cycle_ath_atl(exchange=exchange, symbol=ticker, timeframe='1d', start_date=(algo_param['start_date'] - timedelta(days=365*4)), end_date=algo_param['start_date'])
795
+ ath = ath_atl['ath']
796
+ atl = ath_atl['atl']
797
+
798
+ if lo_close>ath:
799
+ ath = lo_close
800
+ if lo_close<atl:
801
+ atl = lo_close
802
+
803
+ # Incoming economic calendars? num_impacting_economic_calendars is used to block entries when events are incoming (entries are only made when total_num_ecoevents==0).
804
+ num_impacting_economic_calendars : int = 0
805
+ num_bullish_ecoevents, num_bearish_ecoevents, total_num_ecoevents = 0, 0, 0
806
+ if economic_calendars_loaded and algo_param['block_entries_on_impacting_ecoevents']:
807
+ pd_impacting_economic_calendars = pd_economic_calendars[pd_economic_calendars.event_code.isin(algo_param['mapped_event_codes'])]
808
+ pd_impacting_economic_calendars = pd_impacting_economic_calendars[
809
+ (
810
+ (
811
+ pd_impacting_economic_calendars.calendar_item_timestamp_ms>=lo_timestamp_ms) # Incoming
812
+ & (lo_timestamp_ms>=(pd_impacting_economic_calendars.calendar_item_timestamp_ms - num_intervals_block_pending_ecoevents_ms)
813
+ )
814
+ )
815
+ |
816
+ (
817
+ (
818
+ pd_impacting_economic_calendars.calendar_item_timestamp_ms<lo_timestamp_ms) # Passed
819
+ & (lo_timestamp_ms<=(pd_impacting_economic_calendars.calendar_item_timestamp_ms + num_intervals_block_pending_ecoevents_ms/3)
820
+ )
821
+ )
822
+ ]
823
+ num_impacting_economic_calendars = pd_impacting_economic_calendars.shape[0]
824
+
825
+ if num_impacting_economic_calendars>0:
826
+ pd_passed_economic_calendars = pd_impacting_economic_calendars[pd_impacting_economic_calendars.calendar_item_timestamp_ms>(lo_timestamp_ms+one_interval_ms)] # Careful with look ahead bias
827
+ num_bullish_ecoevents = pd_passed_economic_calendars[pd_passed_economic_calendars.pos_neg=='bullish'].shape[0]
828
+ num_bearish_ecoevents = pd_passed_economic_calendars[pd_passed_economic_calendars.pos_neg=='bearish'].shape[0]
829
+ num_neutral_ecoevents = pd_passed_economic_calendars[pd_passed_economic_calendars.pos_neg=='neutral'].shape[0]
830
+
831
+ # If adj_sl_on_ecoevents==True, total_num_ecoevents is used to set sl_percent_adj
832
+ total_num_ecoevents = num_bullish_ecoevents + num_bearish_ecoevents + num_neutral_ecoevents
833
+
834
+ lo_fib_eval_result = lookup_fib_target(lo_row_tm1, pd_lo_candles)
835
+ lo_fib_short_periods_fib_target, lo_fib_short_periods_price_swing, lo_fib_long_periods_fib_target, lo_fib_long_periods_price_swing = None, None, None, None
836
+ if lo_fib_eval_result:
837
+ lo_fib_short_periods_fib_target = lo_fib_eval_result['short_periods']['fib_target']
838
+ lo_fib_long_periods_fib_target = lo_fib_eval_result['long_periods']['fib_target']
839
+
840
+ current_ref_candles_segment_index, last_ref_candles_segmment_index = -1, -1
841
+ current_ref_candles_segment, last_ref_candles_segment = None, None
842
+ current_ref_candles_segment_class, last_ref_candles_segment_class = None, None
843
+ if not pypy_compat:
844
+ if pd_ref_candles_segments[(pd_ref_candles_segments.start_ts<=lo_datetime.timestamp()) & (pd_ref_candles_segments.end_ts>lo_datetime.timestamp()) ].shape[0]>0:
845
+ current_ref_candles_segment_index = pd_ref_candles_segments[(pd_ref_candles_segments.start_ts<=lo_datetime.timestamp()) & (pd_ref_candles_segments.end_ts>lo_datetime.timestamp()) ].index.to_list()[0] # Take first
846
+ current_ref_candles_segment = pd_ref_candles_segments.iloc[current_ref_candles_segment_index]
847
+ if current_ref_candles_segment is not None and not current_ref_candles_segment.empty:
848
+ current_ref_candles_segment_class = current_ref_candles_segment['class']
849
+ last_ref_candles_segmment_index = current_ref_candles_segment_index
850
+ last_ref_candles_segment = current_ref_candles_segment
851
+ if current_ref_candles_segment_index>0:
852
+ last_ref_candles_segmment_index = current_ref_candles_segment_index-1
853
+ last_ref_candles_segment = pd_ref_candles_segments.iloc[last_ref_candles_segmment_index]
854
+ if last_ref_candles_segment is not None and not last_ref_candles_segment.empty:
855
+ last_ref_candles_segment_class = last_ref_candles_segment['class']
856
+
857
+ current_hi_candles_segment_index, last_hi_candles_segmment_index = -1, -1
858
+ current_hi_candles_segment, last_hi_candles_segment = None, None
859
+ current_hi_candles_segment_class, last_hi_candles_segment_class = None, None
860
+ if not pypy_compat:
861
+ if pd_hi_candles_segments[(pd_hi_candles_segments.start_ts<=lo_datetime.timestamp()) & (pd_hi_candles_segments.end_ts>lo_datetime.timestamp()) ].shape[0]>0:
862
+ current_hi_candles_segment_index = pd_hi_candles_segments[(pd_hi_candles_segments.start_ts<=lo_datetime.timestamp()) & (pd_hi_candles_segments.end_ts>lo_datetime.timestamp()) ].index.to_list()[0] # Take first
863
+ current_hi_candles_segment = pd_hi_candles_segments.iloc[current_hi_candles_segment_index]
864
+ if current_hi_candles_segment is not None and not current_hi_candles_segment.empty:
865
+ current_hi_candles_segment_class = current_hi_candles_segment['class']
866
+ last_hi_candles_segmment_index = current_hi_candles_segment_index
867
+ last_hi_candles_segment = current_hi_candles_segment
868
+ if current_hi_candles_segment_index>0:
869
+ last_hi_candles_segmment_index = current_hi_candles_segment_index-1
870
+ last_hi_candles_segment = pd_hi_candles_segments.iloc[last_hi_candles_segmment_index]
871
+ if last_hi_candles_segment is not None and not last_hi_candles_segment.empty:
872
+ last_hi_candles_segment_class = last_hi_candles_segment['class']
873
+
874
+ current_lo_candles_segment_index, last_lo_candles_segmment_index = -1, -1
875
+ current_lo_candles_segment, last_lo_candles_segment = None, None
876
+ current_lo_candles_segment_class, last_lo_candles_segment_class = None, None
877
+ if not pypy_compat:
878
+ if pd_lo_candles_segments[(pd_lo_candles_segments.start_ts<=lo_datetime.timestamp()) & (pd_lo_candles_segments.end_ts>lo_datetime.timestamp()) ].shape[0]>0:
879
+ current_lo_candles_segment_index = pd_lo_candles_segments[(pd_lo_candles_segments.start_ts<=lo_datetime.timestamp()) & (pd_lo_candles_segments.end_ts>lo_datetime.timestamp()) ].index.to_list()[0] # Take first
880
+ current_lo_candles_segment = pd_lo_candles_segments.iloc[current_lo_candles_segment_index]
881
+ if current_lo_candles_segment is not None and not current_lo_candles_segment.empty:
882
+ current_lo_candles_segment_class = current_lo_candles_segment['class']
883
+ last_lo_candles_segmment_index = current_lo_candles_segment_index
884
+ last_lo_candles_segment = current_lo_candles_segment
885
+ if current_lo_candles_segment_index>0:
886
+ last_lo_candles_segmment_index = current_lo_candles_segment_index-1
887
+ last_lo_candles_segment = pd_lo_candles_segments.iloc[last_lo_candles_segmment_index]
888
+ if last_lo_candles_segment is not None and not last_lo_candles_segment.empty:
889
+ last_lo_candles_segment_class = last_lo_candles_segment['class']
890
+
891
+ # Find corresponding rows in the reference candles and, further below, in pd_hi_candles
892
+ def _find_ref_row(lo_year, lo_month, lo_day, pd_ref_candles):
893
+ ref_row = None
894
+ ref_matching_rows = pd_ref_candles[(pd_ref_candles.year==lo_year) & (pd_ref_candles.month==lo_month) & (pd_ref_candles.day==lo_day)]
895
+ if not ref_matching_rows.empty:
896
+ ref_row = ref_matching_rows.iloc[0]
897
+ ref_row.has_inflection_point = False
898
+
899
+ recent_rows = pd_ref_candles[(pd_ref_candles['datetime'] <= lo_datetime)].tail(3)
900
+ if not recent_rows['close_above_or_below_ema'].isna().all():
901
+ ref_row.has_inflection_point = True
902
+
903
+ else:
904
+ logger.warning(f"{key} ref_row not found for year: {lo_year}, month: {lo_month}, day: {lo_day}")
905
+
906
+ return ref_row
907
+
908
+ def _search_hi_tm1(hi_row, lo_row, pd_hi_candles):
909
+ row_index = hi_row.name -1
910
+ hi_row_tm1 = pd_hi_candles.iloc[row_index] if hi_row is not None else None
911
+ hi_row_tm1 = hi_row_tm1 if hi_row_tm1['timestamp_ms'] < lo_row['timestamp_ms'] else None
912
+ if hi_row_tm1 is not None and row_index>1:
913
+ while hi_row_tm1['timestamp_ms'] >= lo_row['timestamp_ms']:
914
+ row_index = row_index -1
915
+ hi_row_tm1 = pd_hi_candles.iloc[row_index]
916
+ return hi_row_tm1
917
+
918
+ hi_row, hi_row_tm1 = None, None
919
+ if lo_datetime>=algo_param['start_date']:
920
+ if algo_param['lo_candle_size'][-1]=="m":
921
+ matching_rows = pd_hi_candles[(pd_hi_candles.year==lo_year) & (pd_hi_candles.month==lo_month) & (pd_hi_candles.day==lo_day) & (pd_hi_candles.hour==lo_hour)]
922
+ if not matching_rows.empty:
923
+ hi_row = matching_rows.iloc[0]
924
+
925
+ else:
926
+ logger.warning(f"{key} hi_row not found for year: {lo_year}, month: {lo_month}, day: {lo_day}, hour: {lo_hour}")
927
+ continue
928
+
929
+ hi_row_tm1 = _search_hi_tm1(hi_row, lo_row, pd_hi_candles)
930
+ if hi_row_tm1 is not None:
931
+ assert(hi_row_tm1['timestamp_ms'] < lo_row['timestamp_ms']) # No look ahead bias!!!
932
+ else:
933
+ continue
934
+
935
+ # Be careful with look ahead bias!!!
936
+ target_ref_candle_date = lo_datetime + timedelta(days=-1)
937
+ ref_row_fast = _find_ref_row(
938
+ target_ref_candle_date.year,
939
+ target_ref_candle_date.month,
940
+ target_ref_candle_date.day,
941
+ pd_ref_candles_fast)
942
+ ref_row_slow = _find_ref_row(
943
+ target_ref_candle_date.year,
944
+ target_ref_candle_date.month,
945
+ target_ref_candle_date.day,
946
+ pd_ref_candles_slow)
947
+
948
+ elif algo_param['lo_candle_size'][-1]=="h":
949
+ matching_rows = pd_hi_candles[(pd_hi_candles.year==lo_year) & (pd_hi_candles.month==lo_month) & (pd_hi_candles.day==lo_day)]
950
+ if not matching_rows.empty:
951
+ hi_row = matching_rows.iloc[0]
952
+
953
+ else:
954
+ logger.warning(f"{key} hi_row not found for year: {lo_year}, month: {lo_month}, day: {lo_day}")
955
+ continue
956
+
957
+ hi_row_tm1 = _search_hi_tm1(hi_row, lo_row, pd_hi_candles)
958
+ if hi_row_tm1 is not None:
959
+ assert(hi_row_tm1['timestamp_ms'] < lo_row['timestamp_ms']) # No look ahead bias!!!
960
+ else:
961
+ continue
962
+
963
+ # Be careful with look ahead bias!!!
964
+ target_ref_candle_date = lo_datetime + timedelta(days=-1)
965
+ ref_row_fast = _find_ref_row(
966
+ target_ref_candle_date.year,
967
+ target_ref_candle_date.month,
968
+ target_ref_candle_date.day,
969
+ pd_ref_candles_fast)
970
+ ref_row_slow = _find_ref_row(
971
+ target_ref_candle_date.year,
972
+ target_ref_candle_date.month,
973
+ target_ref_candle_date.day,
974
+ pd_ref_candles_slow)
975
+
976
+ elif algo_param['lo_candle_size'][-1]=="d":
977
+ # Not supported atm
978
+ hi_row, hi_row_tm1 = None, None
979
+
980
+ hi_datetime, hi_year, hi_month, hi_day, hi_hour, hi_minute, hi_timestamp_ms = None, None, None, None, None, None, None
981
+ hi_open, hi_high, hi_low, hi_close, hi_volume = None, None, None, None, None
982
+ hi_atr, hi_rsi, hi_rsi_bucket, hi_rsi_trend, hi_mfi, hi_mfi_bucket, hi_macd_minus_signal, hi_boillenger_upper, hi_boillenger_lower, hi_boillenger_channel_height = None, None, None, None, None, None, None, None, None, None
983
+ hi_hurst_exp, hi_ema_volume_long_periods = None, None
984
+ hi_tm1_rsi, hi_tm1_rsi_bucket, hi_tm1_rsi_trend = None, None, None
985
+ hi_fib_eval_result = None
986
+ if hi_row is not None:
987
+ hi_datetime = hi_row['datetime']
988
+ hi_year = hi_row['year']
989
+ hi_month = hi_row['month']
990
+ hi_day = hi_row['day']
991
+ hi_hour = hi_row['hour']
992
+ hi_minute = hi_row['minute']
993
+ hi_timestamp_ms = hi_row['timestamp_ms']
994
+ hi_open = hi_row['open']
995
+ hi_high = hi_row['high']
996
+ hi_low = hi_row['low']
997
+ hi_close = hi_row['close']
998
+ hi_volume = hi_row['volume']
999
+ hi_atr = hi_row['atr']
1000
+ hi_rsi = hi_row['rsi']
1001
+ hi_rsi_bucket = hi_row['rsi_bucket']
1002
+ hi_rsi_trend = hi_row['rsi_trend']
1003
+ hi_mfi = hi_row['mfi']
1004
+ hi_mfi_bucket = hi_row['mfi_bucket']
1005
+ hi_macd_minus_signal = hi_row['macd_minus_signal']
1006
+ hi_boillenger_upper = hi_row['boillenger_upper']
1007
+ hi_boillenger_lower = hi_row['boillenger_lower']
1008
+ hi_boillenger_channel_height = hi_row['boillenger_channel_height']
1009
+ hi_hurst_exp = hi_row['hurst_exp']
1010
+
1011
+ hi_tm1_rsi = hi_row_tm1['rsi']
1012
+ hi_tm1_rsi_bucket = hi_row_tm1['rsi_bucket']
1013
+ hi_tm1_rsi_trend = hi_row_tm1['rsi_trend']
1014
+
1015
+ hi_ema_volume_long_periods = hi_row['ema_volume_long_periods']
1016
+ hi_ema_short_slope = hi_row['ema_short_slope'] if 'ema_short_slope' in pd_hi_candles.columns else 0
1017
+ hi_normalized_ema_short_slope = hi_row['normalized_ema_short_slope'] if 'normalized_ema_short_slope' in pd_hi_candles.columns else 0
1018
+ hi_ema_long_slope = hi_row['ema_long_slope'] if 'ema_long_slope' in pd_hi_candles.columns else 0
1019
+ hi_normalized_ema_long_slope = hi_row['normalized_ema_long_slope'] if 'normalized_ema_long_slope' in pd_hi_candles.columns else 0
1020
+ hi_tm1_normalized_ema_long_slope = hi_row_tm1['normalized_ema_long_slope'] if 'normalized_ema_long_slope' in pd_hi_candles.columns else 0
1021
+
1022
+ hi_max_short_periods = hi_row['max_short_periods']
1023
+ hi_idmax_short_periods = int(hi_row['idmax_short_periods']) if not math.isnan(hi_row['idmax_short_periods']) else None
1024
+ hi_idmax_dt_short_periods = pd_hi_candles.at[hi_idmax_short_periods,'datetime'] if not(hi_idmax_short_periods is None or pd.isna(hi_idmax_short_periods)) else None
1025
+ hi_max_long_periods = hi_row['max_long_periods']
1026
+ hi_idmax_long_periods = int(hi_row['idmax_long_periods']) if not math.isnan(hi_row['idmax_long_periods']) else None
1027
+ hi_idmax_dt_long_periods = pd_hi_candles.at[hi_idmax_long_periods,'datetime'] if not(hi_idmax_long_periods is None or pd.isna(hi_idmax_long_periods)) else None
1028
+
1029
+ hi_tm1_max_short_periods = hi_row_tm1['max_short_periods']
1030
+ hi_tm1_idmax_short_periods = int(hi_row_tm1['idmax_short_periods']) if not math.isnan(hi_row_tm1['idmax_short_periods']) else None
1031
+ hi_tm1_idmax_dt_short_periods = pd_hi_candles.at[hi_tm1_idmax_short_periods,'datetime'] if not(hi_tm1_idmax_short_periods is None or pd.isna(hi_tm1_idmax_short_periods)) else None
1032
+ hi_tm1_max_long_periods = hi_row_tm1['max_long_periods']
1033
+ hi_tm1_idmax_long_periods = int(hi_row_tm1['idmax_long_periods']) if not math.isnan(hi_row_tm1['idmax_long_periods']) else None
1034
+ hi_tm1_idmax_dt_long_periods = pd_hi_candles.at[hi_tm1_idmax_long_periods,'datetime'] if not(hi_tm1_idmax_long_periods is None or pd.isna(hi_tm1_idmax_long_periods)) else None
1035
+
1036
+ hi_min_short_periods = hi_row['min_short_periods']
1037
+ hi_idmin_short_periods = int(hi_row['idmin_short_periods']) if not math.isnan(hi_row['idmin_short_periods']) else None
1038
+ hi_idmin_dt_short_periods = pd_hi_candles.at[hi_idmin_short_periods,'datetime'] if not (hi_idmin_short_periods is None or pd.isna(hi_idmin_short_periods)) else None
1039
+ hi_min_long_periods = hi_row['min_long_periods']
1040
+ hi_idmin_long_periods = int(hi_row['idmin_long_periods']) if not math.isnan(hi_row['idmin_long_periods']) else None
1041
+ hi_idmin_dt_long_periods = pd_hi_candles.at[hi_idmin_long_periods,'datetime'] if not (hi_idmin_long_periods is None or pd.isna(hi_idmin_long_periods)) else None
1042
+
1043
+ hi_tm1_min_short_periods = hi_row_tm1['min_short_periods']
1044
+ hi_tm1_idmin_short_periods = int(hi_row_tm1['idmin_short_periods']) if not math.isnan(hi_row_tm1['idmin_short_periods']) else None
1045
+ hi_tm1_idmin_dt_short_periods = pd_hi_candles.at[hi_tm1_idmin_short_periods,'datetime'] if not (hi_tm1_idmin_short_periods is None or pd.isna(hi_tm1_idmin_short_periods)) else None
1046
+ hi_tm1_min_long_periods = hi_row_tm1['min_long_periods']
1047
+ hi_tm1_idmin_long_periods = int(hi_row_tm1['idmin_long_periods']) if not math.isnan(hi_row_tm1['idmin_long_periods']) else None
1048
+ hi_tm1_idmin_dt_long_periods = pd_hi_candles.at[hi_tm1_idmin_long_periods,'datetime'] if not (hi_tm1_idmin_long_periods is None or pd.isna(hi_tm1_idmin_long_periods)) else None
1049
+
1050
+ hi_fib_eval_result = lookup_fib_target(hi_row_tm1, pd_hi_candles)
1051
+ hi_fib_short_periods_fib_target, hi_fib_short_periods_price_swing, hi_fib_long_periods_fib_target, hi_fib_long_periods_price_swing = None, None, None, None
1052
+ if hi_fib_eval_result:
1053
+ hi_fib_short_periods_fib_target = hi_fib_eval_result['short_periods']['fib_target']
1054
+ hi_fib_long_periods_fib_target = hi_fib_eval_result['long_periods']['fib_target']
1055
+
1056
+ last_candles, post_move_candles, post_move_price_change, post_move_price_change_percent = None, None, None, None
1057
+ if algo_param['last_candles_timeframe']=='lo':
1058
+ last_candles = pd_lo_candles[pd_lo_candles['timestamp_ms']<=lo_timestamp_ms].tail(algo_param['how_many_last_candles']).to_dict('records')
1059
+ assert(all([ candle['timestamp_ms']<=lo_timestamp_ms for candle in last_candles ]))
1060
+ post_move_candles = pd_lo_candles[pd_lo_candles['timestamp_ms']<=lo_timestamp_ms].tail(algo_param['post_move_num_intervals']).to_dict('records')
1061
+
1062
+ elif algo_param['last_candles_timeframe']=='hi' and hi_row is not None:
1063
+ last_candles = pd_hi_candles[pd_hi_candles['timestamp_ms']<=hi_timestamp_ms].tail(algo_param['how_many_last_candles']).to_dict('records')
1064
+ assert(all([ candle['timestamp_ms']<=hi_timestamp_ms for candle in last_candles ]))
1065
+ post_move_candles = pd_hi_candles[pd_hi_candles['timestamp_ms']<=hi_timestamp_ms].tail(algo_param['post_move_num_intervals']).to_dict('records')
1066
+
1067
+ post_move_price_change, post_move_price_change_percent = 0, 0
1068
+ if post_move_candles and len(post_move_candles)>=2:
1069
+ post_move_price_change = post_move_candles[-1]['close'] - post_move_candles[0]['open']
1070
+ post_move_price_change_percent = 0
1071
+ if post_move_price_change>0:
1072
+ post_move_price_change_percent = (post_move_candles[-1]['close']/post_move_candles[0]['open'] -1) * 100
1073
+ else:
1074
+ post_move_price_change_percent = -(post_move_candles[0]['close']/post_move_candles[-1]['open'] -1) * 100
1075
+
1076
+ ref_close_fast, ref_ema_close_fast = None, None
1077
+ if ref_row_fast is not None:
1078
+ ref_close_fast = ref_row_fast['close']
1079
+ ref_ema_close_fast = ref_row_fast['ema_close']
1080
+
1081
+ ref_close_slow, ref_ema_close_slow = None, None
1082
+ if ref_row_slow is not None:
1083
+ ref_close_slow = ref_row_slow['close']
1084
+ ref_ema_close_slow = ref_row_slow['ema_close']
1085
+
1086
+ # POSITION NOTIONAL MARKING lo_low, lo_high. pessimistic!
1087
+ def _refresh_current_position(timestamp_ms):
1088
+ current_position_usdt_buy = sum([x['size'] * lo_close for x in all_trades if not x['closed'] and x['side']=='buy'])
1089
+ current_position_usdt_sell = sum([x['size'] * lo_close for x in all_trades if not x['closed'] and x['side']=='sell'])
1090
+ current_position_usdt = current_position_usdt_buy + current_position_usdt_sell
1091
+ this_ticker_historical_trades = [ trade for trade in all_trades if trade['symbol']==ticker ]
1092
+ this_ticker_open_trades = [ trade for trade in this_ticker_historical_trades if not trade['closed'] ]
1093
+ this_ticker_current_position_usdt_buy = sum([x['size'] * lo_close for x in this_ticker_open_trades if x['side']=='buy'])
1094
+ this_ticker_current_position_usdt_sell = sum([x['size'] * lo_close for x in this_ticker_open_trades if x['side']=='sell'])
1095
+
1096
+ entries_since_sl : Union[int, None] = -1
1097
+
1098
+ avg_entry_price = None
1099
+ pos_side = '---'
1100
+ max_trade_age_ms = timestamp_ms
1101
+ if this_ticker_open_trades:
1102
+ max_trade_age_ms = timestamp_ms - max([trade['timestamp_ms'] for trade in this_ticker_open_trades ])
1103
+
1104
+ avg_entry_price = sum([ trade['entry_price']*trade['size'] for trade in this_ticker_open_trades]) / sum([ trade['size'] for trade in this_ticker_open_trades])
1105
+
1106
+ sides = [ x['side'] for x in this_ticker_open_trades ]
1107
+ if len(set(sides))==1:
1108
+ if sides[0]=='buy':
1109
+ pos_side = 'buy'
1110
+ else:
1111
+ pos_side = 'sell'
1112
+
1113
+ max_sl_trade_age_ms = None
1114
+ this_ticker_sl_trades = [ trade for trade in this_ticker_historical_trades if trade['reason']=='SL' ]
1115
+ if this_ticker_sl_trades:
1116
+ last_sl_timestamp_ms = max([trade['timestamp_ms'] for trade in this_ticker_sl_trades ])
1117
+ max_sl_trade_age_ms = timestamp_ms - last_sl_timestamp_ms
1118
+ entries_since_sl = len([trade for trade in this_ticker_historical_trades if trade['timestamp_ms']>last_sl_timestamp_ms and trade['reason']=='entry' and trade['closed']] )
1119
+
1120
+ # In single-legged trading, we are either long or short a particular ticker at any given moment
1121
+ assert(
1122
+ (this_ticker_current_position_usdt_buy>=0 and this_ticker_current_position_usdt_sell==0)
1123
+ or (this_ticker_current_position_usdt_buy==0 and this_ticker_current_position_usdt_sell>=0))
1124
+
1125
+ if this_ticker_current_position_usdt_buy>0:
1126
+ this_ticker_open_positions_side = 'buy'
1127
+ this_ticker_current_position_usdt = this_ticker_current_position_usdt_buy
1128
+ elif this_ticker_current_position_usdt_sell>0:
1129
+ this_ticker_open_positions_side = 'sell'
1130
+ this_ticker_current_position_usdt = this_ticker_current_position_usdt_sell
1131
+ else:
1132
+ this_ticker_open_positions_side = 'flat'
1133
+ this_ticker_current_position_usdt = 0
1134
+
1135
+ return {
1136
+ 'avg_entry_price' : avg_entry_price,
1137
+ 'side' : pos_side,
1138
+ 'current_position_usdt_buy' : current_position_usdt_buy,
1139
+ 'current_position_usdt_sell' : current_position_usdt_sell,
1140
+ 'current_position_usdt' : current_position_usdt,
1141
+ 'this_ticker_open_trades' : this_ticker_open_trades,
1142
+ 'this_ticker_current_position_usdt_buy' : this_ticker_current_position_usdt_buy,
1143
+ 'this_ticker_current_position_usdt_sell' : this_ticker_current_position_usdt_sell,
1144
+ 'this_ticker_open_positions_side' : this_ticker_open_positions_side,
1145
+ 'this_ticker_current_position_usdt' : this_ticker_current_position_usdt,
1146
+ 'max_trade_age_ms' : max_trade_age_ms,
1147
+ 'max_sl_trade_age_ms' : max_sl_trade_age_ms,
1148
+ 'entries_since_sl' : entries_since_sl
1149
+ }
1150
+
1151
+ current_positions_info = _refresh_current_position(lo_timestamp_ms)
1152
+ avg_entry_price = current_positions_info['avg_entry_price']
1153
+ pos_side = current_positions_info['side']
1154
+ current_position_usdt_buy = current_positions_info['current_position_usdt_buy']
1155
+ current_position_usdt_sell = current_positions_info['current_position_usdt_sell']
1156
+ current_position_usdt = current_positions_info['current_position_usdt']
1157
+ this_ticker_open_trades = current_positions_info['this_ticker_open_trades']
1158
+ this_ticker_current_position_usdt_buy = current_positions_info['this_ticker_current_position_usdt_buy']
1159
+ this_ticker_current_position_usdt_sell = current_positions_info['this_ticker_current_position_usdt_sell']
1160
+ this_ticker_open_positions_side = current_positions_info['this_ticker_open_positions_side']
1161
+ this_ticker_current_position_usdt = current_positions_info['this_ticker_current_position_usdt']
1162
+ max_trade_age_ms = current_positions_info['max_trade_age_ms']
1163
+ max_sl_trade_age_ms = current_positions_info['max_sl_trade_age_ms']
1164
+ entries_since_sl = current_positions_info['entries_since_sl']
1165
+ block_entry_since_last_sl = True if max_sl_trade_age_ms and max_sl_trade_age_ms<=min_sl_age_ms else False
1166
+
1167
+ def _close_open_positions(
1168
+ key, ticker,
1169
+ this_ticker_current_position_usdt,
1170
+ this_ticker_open_positions_side,
1171
+ current_position_usdt,
1172
+ trade_pnl,
1173
+ effective_tp_trailing_percent,
1174
+ row,
1175
+ reason,
1176
+ reason2,
1177
+ gloabl_state, all_trades, all_canvas,
1178
+ algo_param,
1179
+ standard_pnl_percent_buckets=BUCKETS_m100_100
1180
+ ):
1181
+ def _gains_losses_to_label(gains_losses_percent):
1182
+ gains_losses_percent_label = bucketize_val(gains_losses_percent, buckets=standard_pnl_percent_buckets)
1183
+
1184
+ if gains_losses_percent>=0:
1185
+ return f"gain {gains_losses_percent_label}%"
1186
+ else:
1187
+ return f"loss {gains_losses_percent_label}%"
1188
+
1189
+ def _how_long_before_closed_sec_to_label(how_long_before_closed_sec):
1190
+ how_long_before_closed_sec_label = None
1191
+ how_long_before_closed_hr = how_long_before_closed_sec/(60*60)
1192
+ if how_long_before_closed_hr<=1:
1193
+ how_long_before_closed_sec_label = "<=1hr"
1194
+ elif how_long_before_closed_hr>1 and how_long_before_closed_hr<=8:
1195
+ how_long_before_closed_sec_label = ">1hr <=8hr"
1196
+ elif how_long_before_closed_hr>8 and how_long_before_closed_hr<=24:
1197
+ how_long_before_closed_sec_label = ">8hr <=24hr"
1198
+ elif how_long_before_closed_hr>24 and how_long_before_closed_hr<=24*7:
1199
+ how_long_before_closed_sec_label = ">24hr <=7days"
1200
+ elif how_long_before_closed_hr>24*7 and how_long_before_closed_hr<=24*7*2:
1201
+ how_long_before_closed_sec_label = ">7days <=14days"
1202
+ else:
1203
+ how_long_before_closed_sec_label = ">14days"
1204
+ return how_long_before_closed_sec_label
1205
+
1206
+ this_ticker_open_trades = [ trade for trade in all_trades if not trade['closed'] and trade['symbol']==ticker]
1207
+
1208
+ entry_dt = min([ trade['trade_datetime'] for trade in this_ticker_open_trades ])
1209
+ entry_dayofweek = entry_dt.dayofweek
1210
+ entry_hour = entry_dt.hour
1211
+
1212
+ this_datetime = row['datetime']
1213
+ this_timestamp_ms = row['timestamp_ms']
1214
+ dayofweek = row['dayofweek']
1215
+ high = row['high']
1216
+ low = row['low']
1217
+ close = row['close']
1218
+ ema_short_slope = row['ema_short_slope'] if 'ema_short_slope' in row else None
1219
+ ema_long_slope = row['ema_long_slope'] if 'ema_long_slope' in row else None
1220
+
1221
+ # Step 1. mark open trades as closed first
1222
+ entry_commission, exit_commission = 0, 0
1223
+ for trade in this_ticker_open_trades:
1224
+ entry_commission += trade['commission']
1225
+ if this_ticker_open_positions_side=='buy':
1226
+ exit_commission += close * trade['size'] * commission_bps / 10000
1227
+
1228
+ else:
1229
+ exit_commission += close * trade['size'] * commission_bps / 10000
1230
+ trade['trade_pnl'] = 0 # trade_pnl parked under closing trade
1231
+ trade['trade_pnl_bps'] = 0
1232
+ trade['closed'] = True
1233
+ max_pain = min([ trade['max_pain'] for trade in this_ticker_open_trades])
1234
+ max_pain_percent = max_pain/this_ticker_current_position_usdt * 100
1235
+ max_pain_percent_label = _gains_losses_to_label(max_pain_percent)
1236
+
1237
+ timestamp_ms_from_closed_trades = min([ trade['timestamp_ms'] for trade in this_ticker_open_trades])
1238
+ num_impacting_economic_calendars = min([ trade['num_impacting_economic_calendars'] if 'num_impacting_economic_calendars' in trade else 0 for trade in this_ticker_open_trades])
1239
+ max_camp = max([ trade['max_camp'] for trade in this_ticker_open_trades])
1240
+ entry_post_move_price_change_percent = max([ trade['post_move_price_change_percent'] if 'post_move_price_change_percent' in trade else 0 for trade in this_ticker_open_trades ])
1241
+
1242
+ # Step 2. Update global_state
1243
+ trade_pnl_less_comm = trade_pnl - (entry_commission + exit_commission)
1244
+ gains_losses_percent = trade_pnl_less_comm/this_ticker_current_position_usdt * 100
1245
+ gains_losses_percent_label = _gains_losses_to_label(gains_losses_percent)
1246
+ how_long_before_closed_sec = (this_timestamp_ms - timestamp_ms_from_closed_trades) / 1000
1247
+ how_long_before_closed_sec_label = _how_long_before_closed_sec_to_label(how_long_before_closed_sec)
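+
+ # Hedged numeric example of the net-PnL and holding-time labels computed above (all values hypothetical):
+ #   trade_pnl = 120.0 USDT, entry_commission = 2.0, exit_commission = 2.0
+ #   trade_pnl_less_comm = 120.0 - (2.0 + 2.0) = 116.0
+ #   this_ticker_current_position_usdt = 10_000.0
+ #   gains_losses_percent = 116.0 / 10_000.0 * 100 = 1.16, then bucketed by _gains_losses_to_label()
+ #   how_long_before_closed_sec = 6 * 3600 -> _how_long_before_closed_sec_to_label() returns ">1hr <=8hr"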
1248
+
1249
+ gloabl_state.total_equity += trade_pnl_less_comm
1250
+ gloabl_state.total_commission += exit_commission
1251
+ cash_before = gloabl_state.cash
1252
+ gloabl_state.cash = gloabl_state.total_equity
1253
+ cash_after = gloabl_state.cash
1254
+ running_total_num_positions : int = len([ 1 for x in all_trades if x['reason']=='entry' and not x['closed']])
1255
+
1256
+ # Step 3. closing trade
1257
+ # closing_price = low if this_ticker_open_positions_side=='buy' else high # pessimistic!
1258
+ closing_price = close
1259
+ closing_trade = {
1260
+ 'trade_datetime' : this_datetime,
1261
+ 'timestamp_ms' : this_timestamp_ms,
1262
+ 'dayofweek' : dayofweek,
1263
+ 'entry_dt' : entry_dt,
1264
+ 'entry_dayofweek' : entry_dayofweek,
1265
+ 'entry_hour' : entry_hour,
1266
+ 'exchange' : exchange.name,
1267
+ 'symbol' : ticker,
1268
+ 'side' : 'sell' if this_ticker_open_positions_side=='buy' else 'buy',
1269
+ 'size' : this_ticker_current_position_usdt / closing_price, # in base ccy
1270
+ 'entry_price' : closing_price, # pessimistic!
1271
+ 'closed' : True,
1272
+ 'reason' : reason,
1273
+ 'reason2' : reason2,
1274
+ 'total_equity' : gloabl_state.total_equity,
1275
+ 'this_ticker_current_position_usdt' : this_ticker_current_position_usdt,
1276
+ 'current_position_usdt' : current_position_usdt,
1277
+ 'running_total_num_positions' : running_total_num_positions,
1278
+ 'cash_before' : cash_before,
1279
+ 'cash_after' : cash_after,
1280
+ 'order_notional' : this_ticker_current_position_usdt,
1281
+ 'trade_pnl' : trade_pnl,
1282
+ 'commission' : exit_commission,
1283
+ 'max_pain' : max_pain,
1284
+ 'trade_pnl_less_comm': trade_pnl_less_comm,
1285
+ 'trade_pnl_bps' : (trade_pnl / this_ticker_current_position_usdt) * 100 * 100 if this_ticker_current_position_usdt!=0 else 0,
1286
+ 'gains_losses_percent' : gains_losses_percent,
1287
+ 'gains_losses_percent_label' : gains_losses_percent_label,
1288
+ 'how_long_before_closed_sec' : how_long_before_closed_sec,
1289
+ 'how_long_before_closed_sec_label' : how_long_before_closed_sec_label,
1290
+ 'max_pain_percent' : max_pain_percent,
1291
+ 'max_pain_percent_label' : max_pain_percent_label,
1292
+ 'ema_short_slope' : ema_short_slope,
1293
+ 'ema_long_slope' : ema_long_slope,
1294
+ 'num_impacting_economic_calendars' : num_impacting_economic_calendars,
1295
+ 'max_camp' : max_camp,
1296
+ 'entry_post_move_price_change_percent' : entry_post_move_price_change_percent
1297
+ }
1298
+ _last_open_trade = this_ticker_open_trades[-1]
1299
+ additional_fields = {field: _last_open_trade[field] if field in _last_open_trade else None for field in algo_param['additional_trade_fields']}
1300
+ closing_trade.update(additional_fields)
1301
+ all_trades.append(closing_trade)
1302
+
1303
+ if plot_timeseries:
1304
+ '''
1305
+ https://matplotlib.org/stable/api/_as_gen/matplotlib.axes.Axes.axvline.html
1306
+ linestyle='-' means solid line. If you don't supply linestyle, the vertical line won't show!!!
1307
+ '''
1308
+ color = 'green' if reason=='TP' or (reason=='HC' and trade_pnl_less_comm>0) else 'red'
1309
+ all_canvas[f"{key}-param_id{algo_param['param_id']}"]['time_series_canvas'].axvline(x=this_datetime, color=color, linewidth=2, linestyle='--')
1310
+ all_canvas[f"{key}-param_id{algo_param['param_id']}"]['time_series_canvas'].scatter([this_datetime, this_datetime], [low, high], color=color)
1311
+
1312
+ # UNREALIZED PNL EVALUATION. We're being pessimistic! We use low/high for estimating unrealized_pnl for buys and sells respectively here.
1313
+ pnl_percent_notional = 0
1314
+ if current_position_usdt>0:
1315
+ unrealized_pnl, unrealized_pnl_interval, unrealized_pnl_open, unrealized_pnl_live_optimistic, unrealized_pnl_live_pessimistic, unrealized_pnl_live, max_pnl_percent_notional, unrealized_pnl_boillenger, unrealized_pnl_sl, max_unrealized_pnl_live, max_pain, recovered_pnl_optimistic, recovered_pnl_pessimistic, max_recovered_pnl = 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 # USDT
1316
+ _asymmetric_tp_bps = algo_param['asymmetric_tp_bps'] if lo_dayofweek in algo_param['cautious_dayofweek'] else 0
1317
+
1318
+ max_unrealized_pnl_live = max([ trade['max_unrealized_pnl_live'] if 'max_unrealized_pnl_live' in trade else 0 for trade in this_ticker_open_trades ])
1319
+ # 'min' max_pain because max_pain is a negative number. It's a loss!
1320
+ max_pain = min([ trade['max_pain'] if 'max_pain' in trade else 0 for trade in this_ticker_open_trades ])
1321
+ max_recovered_pnl = max([ trade['max_recovered_pnl'] if 'max_recovered_pnl' in trade else 0 for trade in this_ticker_open_trades ])
1322
+ trade_datetime = max([ trade['trade_datetime'] if 'trade_datetime' in trade else 0 for trade in this_ticker_open_trades ])
1323
+ entry_post_move_price_change_percent = max([ trade['post_move_price_change_percent'] if 'post_move_price_change_percent' in trade else 0 for trade in this_ticker_open_trades ])
1324
+ max_camp = max([ trade['max_camp'] for trade in this_ticker_open_trades])
1325
+ running_sl_percent_hard = max([ trade['running_sl_percent_hard'] for trade in this_ticker_open_trades])
1326
+
1327
+ max_pnl_potential_percent = None
1328
+ if any([ trade for trade in this_ticker_open_trades if 'target_price' in trade ]):
1329
+ max_pnl_potential_percent = max([ (trade['target_price']/trade['entry_price'] -1) *100 if trade['side']=='buy' else (trade['entry_price']/trade['target_price'] -1) *100 for trade in this_ticker_open_trades if 'target_price' in trade ])
1330
+
1331
+ kwargs = {k: v for k, v in locals().items() if k in sl_adj_func_params}
1332
+ sl_adj_func_result = sl_adj_func(**kwargs)
1333
+ running_sl_percent_hard = sl_adj_func_result['running_sl_percent_hard']
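+
+ # The strategy hooks (sl_adj_func, trailing_stop_threshold_eval_func, pnl_eval_func, ...) are wired by
+ # name: only the locals whose names appear in the hook's declared parameters are passed in. A minimal
+ # sketch of a compatible hook (hypothetical, not this module's own implementation), assuming the
+ # *_params lists are built with inspect.signature like the toy one below:
+ #
+ #     def my_sl_adj_func(running_sl_percent_hard, max_pain, this_ticker_current_position_usdt, algo_param):
+ #         # toy rule: never let the hard stop be tighter than the worst drawdown seen so far
+ #         drawdown_pct = abs(max_pain) / this_ticker_current_position_usdt * 100 if this_ticker_current_position_usdt else 0
+ #         return {'running_sl_percent_hard': max(running_sl_percent_hard, drawdown_pct)}
+ #
+ #     my_sl_adj_func_params = list(inspect.signature(my_sl_adj_func).parameters)
+ #
+ # The call site above only requires the returned dict to carry 'running_sl_percent_hard'.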
1334
+
1335
+ # this_ticker_open_trades should be updated after SL adj eval
1336
+ for trade in this_ticker_open_trades:
1337
+ trade['running_sl_percent_hard'] = running_sl_percent_hard
1338
+
1339
+ kwargs = {k: v for k, v in locals().items() if k in trailing_stop_threshold_eval_func_params}
1340
+ trailing_stop_threshold_eval_func_result = trailing_stop_threshold_eval_func(**kwargs)
1341
+ tp_min_percent = trailing_stop_threshold_eval_func_result['tp_min_percent']
1342
+ tp_max_percent = trailing_stop_threshold_eval_func_result['tp_max_percent']
1343
+ recover_min_percent = algo_param['recover_min_percent'] if 'recover_min_percent' in algo_param else None
1344
+ recover_max_pain_percent = algo_param['recover_max_pain_percent'] if 'recover_max_pain_percent' in algo_param else None
1345
+
1346
+ # tp_min_percent adj: for strategies where target_price is not based on tp_max_percent but is variable
1347
+ if max_pnl_potential_percent and max_pnl_potential_percent<tp_max_percent:
1348
+ tp_minmax_ratio = tp_min_percent/tp_max_percent
1349
+ tp_max_percent = max_pnl_potential_percent
1350
+ tp_min_percent = tp_minmax_ratio * tp_max_percent
1351
+
1352
+ unrealized_pnl_eval_result = pnl_eval_func(lo_row, lo_row_tm1, running_sl_percent_hard, this_ticker_open_trades, algo_param)
1353
+ unrealized_pnl_interval = unrealized_pnl_eval_result['unrealized_pnl_interval']
1354
+ unrealized_pnl_open = unrealized_pnl_eval_result['unrealized_pnl_open']
1355
+ unrealized_pnl_live_optimistic = unrealized_pnl_eval_result['unrealized_pnl_live_optimistic']
1356
+ unrealized_pnl_live_pessimistic = unrealized_pnl_eval_result['unrealized_pnl_live_pessimistic']
1357
+ unrealized_pnl_tp = unrealized_pnl_eval_result['unrealized_pnl_tp']
1358
+ unrealized_pnl_sl = unrealized_pnl_eval_result['unrealized_pnl_sl']
1359
+ unrealized_pnl_live = unrealized_pnl_live_pessimistic
1360
+
1361
+ if unrealized_pnl_live>0 and unrealized_pnl_live_optimistic>max_unrealized_pnl_live:
1362
+ max_unrealized_pnl_live = unrealized_pnl_live_optimistic
1363
+ for trade in this_ticker_open_trades:
1364
+ trade['max_unrealized_pnl_live'] = max_unrealized_pnl_live # Evaluated optimistically!!!
1365
+
1366
+ # Do this before max_pain is updated
1367
+ if unrealized_pnl_live<0 and unrealized_pnl_live_optimistic>max_pain:
1368
+ recovered_pnl_optimistic = unrealized_pnl_live_optimistic - max_pain
1369
+ recovered_pnl_pessimistic = unrealized_pnl_live_pessimistic - max_pain
1370
+ if recovered_pnl_optimistic>max_recovered_pnl:
1371
+ max_recovered_pnl = recovered_pnl_optimistic
1372
+ for trade in this_ticker_open_trades:
1373
+ trade['max_recovered_pnl'] = max_recovered_pnl
1374
+
1375
+ if unrealized_pnl_live<0:
1376
+ if unrealized_pnl_live<max_pain:
1377
+ max_pain = unrealized_pnl_live
1378
+ for trade in this_ticker_open_trades:
1379
+ trade['max_pain'] = max_pain # unrealized_pnl_live is set to unrealized_pnl_live_pessimistic!
1380
+
1381
+ if unrealized_pnl_live<0 and max_unrealized_pnl_live>0:
1382
+ # If our unrealized_pnl_live has already fallen from positive to negative, reset max_unrealized_pnl_live back to zero
1383
+ max_unrealized_pnl_live = 0
1384
+ for trade in this_ticker_open_trades:
1385
+ trade['max_unrealized_pnl_live'] = max_unrealized_pnl_live
1386
+
1387
+ unrealized_pnl = unrealized_pnl_live
1388
+ pnl_percent_notional = unrealized_pnl_open / current_position_usdt * 100 # This is evaluated using open (Don't use close, that's look-ahead bias!)
1389
+ max_pnl_percent_notional = max_unrealized_pnl_live / current_position_usdt * 100
1390
+ max_pain_percent_notional = max_pain / current_position_usdt * 100
1391
+ max_recovered_pnl_percent_notional = max_recovered_pnl / current_position_usdt * 100
1392
+
1393
+ if (
1394
+ (pnl_percent_notional>0 and pnl_percent_notional>=tp_min_percent)
1395
+ or (
1396
+ recover_max_pain_percent
1397
+ and pnl_percent_notional<0
1398
+ and max_recovered_pnl_percent_notional>=recover_min_percent
1399
+ and abs(max_pain_percent_notional)>=recover_max_pain_percent
1400
+ ) # Taking 'abs': the trailing stop can fire when the trade moves in either direction, even if it is a losing trade.
1401
+ ):
1402
+ '''
1403
+
1404
+ 'effective_tp_trailing_percent' is initialized to float('inf') on entries. Whenever 'pnl_percent_notional' crosses 'tp_min_percent', the trailing stop mechanism kicks in.
1405
+
1406
+ https://norman-lm-fung.medium.com/gradually-tightened-trailing-stops-f7854bf1e02b
1407
+
1408
+ 'effective_tp_trailing_percent' is used to TRIGGER trailing stop.
1409
+ Please be careful if you're marking the closing trade with the candle close.
1410
+ '''
1411
+ if algo_param['use_gradual_tightened_trailing_stops']:
1412
+ effective_tp_trailing_percent = calc_eff_trailing_sl(
1413
+ tp_min_percent = tp_min_percent,
1414
+ tp_max_percent = tp_max_percent,
1415
+ sl_percent_trailing = algo_param['sl_percent_trailing'],
1416
+ pnl_percent_notional = max_pnl_percent_notional if pnl_percent_notional>0 else max_recovered_pnl_percent_notional,
1417
+ default_effective_tp_trailing_percent = float('inf'),
1418
+ linear=True if algo_param['trailing_stop_mode']=='linear' else False, # trailing_stop_mode: linear vs parabolic
1419
+ pow=5
1420
+ )
1421
+ else:
1422
+ effective_tp_trailing_percent = algo_param['sl_percent_trailing']
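+
+ # For intuition only: a minimal linear sketch of the "gradually tightened trailing stop" idea behind
+ # calc_eff_trailing_sl (the real implementation lives in siglab_py.util.trading_util and also supports a
+ # parabolic mode via 'pow'; the formula below is an illustrative assumption, not its exact code):
+ #
+ #     def eff_trailing_sl_linear(tp_min, tp_max, sl_trailing, pnl_pct):
+ #         if pnl_pct < tp_min:
+ #             return float('inf')                  # trailing stop not armed yet
+ #         progress = min((pnl_pct - tp_min) / (tp_max - tp_min), 1.0)
+ #         return sl_trailing * (1.0 - progress)    # tightens toward 0 as pnl approaches tp_max
+ #
+ # e.g. tp_min=1.0, tp_max=4.0, sl_trailing=30.0: pnl 1.0% -> allow 30.0% giveback, 2.5% -> 15.0%, 4.0%+ -> 0.0%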
1423
+
1424
+ # 1. SL
1425
+ if (
1426
+ unrealized_pnl_live < 0
1427
+ or (
1428
+ (unrealized_pnl_live>0 and unrealized_pnl_live<max_unrealized_pnl_live)
1429
+ or (
1430
+ unrealized_pnl_live<0
1431
+ and recovered_pnl_pessimistic<max_recovered_pnl
1432
+ and abs(max_recovered_pnl_percent_notional)>=recover_min_percent
1433
+ and abs(max_pain_percent_notional)>=recover_max_pain_percent
1434
+ )
1435
+ )
1436
+ ):
1437
+ # unrealized_pnl_live is set to unrealized_pnl_live_pessimistic!
1438
+ loss_hard = abs(unrealized_pnl_live)/this_ticker_current_position_usdt * 100 if unrealized_pnl_live<0 else 0
1439
+
1440
+ if unrealized_pnl_live>0:
1441
+ loss_trailing = (1 - unrealized_pnl_live/max_unrealized_pnl_live) * 100 if unrealized_pnl_live>0 and unrealized_pnl_live<max_unrealized_pnl_live else 0
1442
+ elif unrealized_pnl_live<0:
1443
+ loss_trailing = (1 - recovered_pnl_pessimistic/max_recovered_pnl) * 100 if unrealized_pnl_live<0 and recovered_pnl_pessimistic<max_recovered_pnl else 0
1444
+
1445
+ if loss_hard>=running_sl_percent_hard:
1446
+ unrealized_pnl = (running_sl_percent_hard/algo_param['sl_hard_percent']) * unrealized_pnl_sl
1447
+ reason2 = "sl_hard_percent"
1448
+ elif (
1449
+ loss_trailing>=effective_tp_trailing_percent # loss_trailing is evaluated pessimistically.
1450
+ # and pnl_percent_notional>tp_min_percent
1451
+ # and unrealized_pnl_live >= sl_trailing_min_threshold_usdt
1452
+ ):
1453
+ '''
1454
+ If you're using 'effective_tp_trailing_percent' to approximate unrealised pnl, make sure "loss_trailing>=effective_tp_trailing_percent" is the only condition.
1455
+ Don't AND this with other conditions. Otherwise, use the close price to approximate unrealised pnl instead!!!
1456
+ '''
1457
+ if unrealized_pnl_live>0:
1458
+ unrealized_pnl = min(
1459
+ ((100-effective_tp_trailing_percent)/100) * max_unrealized_pnl_live,
1460
+ this_ticker_current_position_usdt * algo_param['tp_max_percent']/100
1461
+ )
1462
+ else:
1463
+ unrealized_pnl = max_pain + ((100-effective_tp_trailing_percent)/100) * max_recovered_pnl
1464
+ # unrealized_pnl = unrealized_pnl_interval # less accurate
1465
+ reason2 = "sl_percent_trailing"
1466
+
1467
+ if (
1468
+ (loss_hard>=running_sl_percent_hard)
1469
+ or (
1470
+ loss_trailing>=effective_tp_trailing_percent
1471
+ # and pnl_percent_notional>tp_min_percent
1472
+ # and unrealized_pnl_live >= sl_trailing_min_threshold_usdt
1473
+ )
1474
+ ):
1475
+ block_entry_since_last_sl = True
1476
+ reason = 'SL' if unrealized_pnl<0 else 'TP'
1477
+ _close_open_positions(
1478
+ key,
1479
+ ticker,
1480
+ this_ticker_current_position_usdt,
1481
+ this_ticker_open_positions_side,
1482
+ current_position_usdt,
1483
+ unrealized_pnl,
1484
+ effective_tp_trailing_percent,
1485
+ lo_row, reason, reason2, gloabl_state, all_trades, all_canvas,
1486
+ algo_param
1487
+ )
1488
+ current_positions_info = _refresh_current_position(lo_timestamp_ms)
1489
+ avg_entry_price = current_positions_info['avg_entry_price']
1490
+ pos_side = current_positions_info['side']
1491
+ current_position_usdt_buy = current_positions_info['current_position_usdt_buy']
1492
+ current_position_usdt_sell = current_positions_info['current_position_usdt_sell']
1493
+ current_position_usdt = current_positions_info['current_position_usdt']
1494
+ this_ticker_open_trades = current_positions_info['this_ticker_open_trades']
1495
+ this_ticker_current_position_usdt_buy = current_positions_info['this_ticker_current_position_usdt_buy']
1496
+ this_ticker_current_position_usdt_sell = current_positions_info['this_ticker_current_position_usdt_sell']
1497
+ this_ticker_open_positions_side = current_positions_info['this_ticker_open_positions_side']
1498
+ this_ticker_current_position_usdt = current_positions_info['this_ticker_current_position_usdt']
1499
+ max_sl_trade_age_ms = current_positions_info['max_sl_trade_age_ms']
1500
+
1501
+ # sl_percent_trailing = algo_param['sl_percent_trailing'] # Reset! Remember!
1502
+ this_ticker_open_positions_side='flat' # Reset!
1503
+ reversal_camp_cache[key] = REVERSAL_CAMP_ITEM.copy()
1504
+
1505
+ # 2. TP: Triggered by unrealized_pnl_live_optimistic, not unrealized_pnl_live (which is unrealized_pnl_live_pessimistic). Pnl estimation from unrealized_pnl_boillenger however!!!
1506
+ if this_ticker_current_position_usdt>0 and unrealized_pnl_live_optimistic>0:
1507
+ kwargs = {k: v for k, v in locals().items() if k in tp_eval_func_params}
1508
+ tp_eval_func_result = tp_eval_func(**kwargs)
1509
+
1510
+ if tp_eval_func_result:
1511
+ unrealized_pnl_tp = min(
1512
+ unrealized_pnl_tp,
1513
+ this_ticker_current_position_usdt * algo_param['tp_max_percent']/100
1514
+ )
1515
+ _close_open_positions(
1516
+ key, ticker,
1517
+ this_ticker_current_position_usdt,
1518
+ this_ticker_open_positions_side,
1519
+ current_position_usdt,
1520
+ unrealized_pnl_tp,
1521
+ effective_tp_trailing_percent,
1522
+ lo_row, 'TP', '', gloabl_state, all_trades, all_canvas,
1523
+ algo_param
1524
+ )
1525
+ current_position_usdt -= this_ticker_current_position_usdt
1526
+ this_ticker_current_position_usdt = 0
1527
+ this_ticker_open_positions_side='flat' # Reset!
1528
+ reversal_camp_cache[key] = REVERSAL_CAMP_ITEM.copy()
1529
+
1530
+ def _position_size_and_cash_check(
1531
+ current_position_usdt : float, # All positions added together (include other tickers)
1532
+ this_ticker_current_position_usdt : float, # This position only
1533
+ target_order_notional : float,
1534
+ total_equity : float,
1535
+ target_position_size_percent_total_equity : float,
1536
+ cash : float
1537
+ ) -> bool:
1538
+ return (
1539
+ (current_position_usdt + target_order_notional <= total_equity * (target_position_size_percent_total_equity/100))
1540
+ and (this_ticker_current_position_usdt + target_order_notional <= total_equity * (target_position_size_percent_total_equity/100))
1541
+ and cash >= target_order_notional
1542
+ )
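+
+ # A quick worked example of the sizing/cash gate above (hypothetical numbers):
+ #
+ #     _position_size_and_cash_check(
+ #         current_position_usdt=15_000, this_ticker_current_position_usdt=5_000,
+ #         target_order_notional=4_000, total_equity=100_000,
+ #         target_position_size_percent_total_equity=20, cash=50_000)
+ #
+ #     # True:  15_000 + 4_000 <= 20_000, 5_000 + 4_000 <= 20_000, and 50_000 >= 4_000
+ #     # With current_position_usdt=18_000 instead, 18_000 + 4_000 = 22_000 > 20_000 -> the entry is blocked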
1543
+
1544
+ entry_adj_bps = 0 # Essentially disables this for the time being.
1545
+
1546
+ if(
1547
+ lo_low<=(lo_boillenger_lower*(1-entry_adj_bps/10000))
1548
+ ):
1549
+ lo_boillenger_lower_breached_history = lo_boillenger_lower_breached_cache.get(key, [])
1550
+ lo_boillenger_upper_breached_history = lo_boillenger_upper_breached_cache.get(key, [])
1551
+ lo_boillenger_lower_breached_cache[key] = lo_boillenger_lower_breached_history
1552
+ lo_boillenger_upper_breached_cache[key] = lo_boillenger_upper_breached_history
1553
+ lo_boillenger_upper_breached_history.clear()
1554
+ lo_boillenger_lower_breached_history.append(lo_datetime)
1555
+ reversal_camp_cache[key] = REVERSAL_CAMP_ITEM.copy()
1556
+ elif(
1557
+ lo_high>=(lo_boillenger_upper*(1+entry_adj_bps/10000))
1558
+ ):
1559
+ lo_boillenger_lower_breached_history = lo_boillenger_lower_breached_cache.get(key, [])
1560
+ lo_boillenger_upper_breached_history = lo_boillenger_upper_breached_cache.get(key, [])
1561
+ lo_boillenger_lower_breached_cache[key] = lo_boillenger_lower_breached_history
1562
+ lo_boillenger_upper_breached_cache[key] = lo_boillenger_upper_breached_history
1563
+ lo_boillenger_lower_breached_history.clear()
1564
+ lo_boillenger_upper_breached_history.append(lo_datetime)
1565
+ reversal_camp_cache[key] = REVERSAL_CAMP_ITEM.copy()
1566
+
1567
+ if algo_param['constant_order_notional']:
1568
+ target_order_notional = algo_param['target_order_notional']
1569
+ else:
1570
+ kwargs = {k: v for k, v in locals().items() if k in order_notional_adj_func_params}
1571
+ order_notional_adj_func_result = order_notional_adj_func(**kwargs)
1572
+ target_order_notional = order_notional_adj_func_result['target_order_notional']
1573
+
1574
+ order_notional_long, order_notional_short = target_order_notional, target_order_notional
1575
+ if algo_param['clip_order_notional_to_best_volumes']:
1576
+ order_notional_long = min(lo_volume * lo_low, target_order_notional)
1577
+ order_notional_short = min(lo_volume * lo_high, target_order_notional)
1578
+
1579
+ kwargs = {k: v for k, v in locals().items() if k in allow_entry_initial_func_params}
1580
+ allow_entry_initial_func_result = allow_entry_initial_func(**kwargs)
1581
+ allow_entry_initial_long = allow_entry_initial_func_result['long']
1582
+ allow_entry_initial_short = allow_entry_initial_func_result['short']
1583
+ allow_entry_final_long = False
1584
+ allow_entry_final_short = False
1585
+
1586
+ if algo_param['enable_sliced_entry']:
1587
+ kwargs = {k: v for k, v in locals().items() if k in allow_slice_entry_func_params}
1588
+ allow_slice_entry_func_result = allow_slice_entry_func(**kwargs)
1589
+
1590
+ # 3. Entries
1591
+ if (
1592
+ algo_param['strategy_mode'] in [ 'long_only', 'long_short']
1593
+ and order_notional_long>0
1594
+ and (not algo_param['block_entries_on_impacting_ecoevents'] or num_impacting_economic_calendars==0)
1595
+ and not block_entry_since_last_sl
1596
+ and (
1597
+ (
1598
+ this_ticker_open_positions_side=='flat'
1599
+ and allow_entry_initial_long
1600
+ and _position_size_and_cash_check(current_position_usdt, this_ticker_current_position_usdt, order_notional_long, gloabl_state.total_equity, target_position_size_percent_total_equity, gloabl_state.cash)
1601
+ ) or (
1602
+ this_ticker_open_positions_side=='buy'
1603
+ and _position_size_and_cash_check(current_position_usdt, this_ticker_current_position_usdt, order_notional_long, gloabl_state.total_equity, target_position_size_percent_total_equity, gloabl_state.cash)
1604
+ and (algo_param['enable_sliced_entry'] and allow_slice_entry_func_result['long'])
1605
+ )
1606
+ )
1607
+ ):
1608
+ # Long
1609
+ order_notional = order_notional_long
1610
+
1611
+ if not reversal_camp_cache[key]['camp1'] and not reversal_camp_cache[key]['camp2'] and not reversal_camp_cache[key]['camp3']:
1612
+ reversal_camp_cache[key]['camp1'] = True
1613
+ reversal_camp_cache[key]['camp1_price'] = lo_close
1614
+ elif reversal_camp_cache[key]['camp1'] and not reversal_camp_cache[key]['camp2'] and not reversal_camp_cache[key]['camp3']:
1615
+ reversal_camp_cache[key]['camp2'] = True
1616
+ reversal_camp_cache[key]['camp2_price'] = lo_close
1617
+ elif reversal_camp_cache[key]['camp1'] and reversal_camp_cache[key]['camp2'] and not reversal_camp_cache[key]['camp3']:
1618
+ reversal_camp_cache[key]['camp3'] = True
1619
+ reversal_camp_cache[key]['camp3_price'] = lo_close
1620
+ reversal_camp_cache[key]['datetime'] = lo_datetime
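+
+ # The reversal "camp" cache tracks how many times we have (re-)entered during the same reversal attempt;
+ # _max_camp (defined elsewhere in this module) presumably maps the flags to the highest camp reached.
+ # A hedged sketch of the progression:
+ #   entry #1: camp1=True, camp1_price=lo_close   -> max_camp assumed 1
+ #   entry #2: camp2=True, camp2_price=lo_close   -> max_camp assumed 2
+ #   entry #3: camp3=True, camp3_price=lo_close   -> max_camp assumed 3
+ # The cache resets to REVERSAL_CAMP_ITEM.copy() after SL/TP and whenever a Bollinger band is breached (see above).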
1621
+
1622
+ fetch_historical_price_func = fetch_price
1623
+ kwargs = {k: v for k, v in locals().items() if k in allow_entry_final_func_params}
1624
+ allow_entry_final_func_result = allow_entry_final_func(**kwargs)
1625
+
1626
+ allow_entry_final_long = allow_entry_final_func_result['long']
1627
+ if (allow_entry_final_long):
1628
+ order_notional_adj_factor = algo_param['dayofweek_adj_map_order_notional'][lo_dayofweek]
1629
+ if order_notional>0 and order_notional_adj_factor>0:
1630
+ max_camp = _max_camp(reversal_camp_cache[key]['camp1'], reversal_camp_cache[key]['camp2'], reversal_camp_cache[key]['camp3'])
1631
+ target_price = allow_entry_final_func_result['target_price_long']
1632
+ reference_price = allow_entry_final_func_result['reference_price']
1633
+ sitting_on_boillenger_band = allow_entry_final_func_result['sitting_on_boillenger_band'] if 'sitting_on_boillenger_band' in allow_entry_final_func_result else None
1634
+ _additional_trade_fields = {k: v for k, v in locals().items() if k in algo_param['additional_trade_fields']}
1635
+
1636
+ commission = order_notional_adj_factor*order_notional * commission_bps / 10000
1637
+ gloabl_state.total_commission += commission
1638
+
1639
+ cash_before = gloabl_state.cash
1640
+ gloabl_state.cash = gloabl_state.cash - order_notional_adj_factor*order_notional - commission
1641
+ cash_after = gloabl_state.cash
1642
+
1643
+ running_total_num_positions : int = len([ 1 for x in all_trades if x['reason']=='entry' and not x['closed']])
1644
+
1645
+ entry_price = allow_entry_final_func_result['entry_price_long']
1646
+ reversal_camp_cache[key]['price'] = entry_price
1647
+
1648
+ pnl_potential_bps = (target_price/entry_price - 1) *10000 if target_price else None
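+
+ # Hedged numeric example (long side): entry_price=100.0, target_price=103.0
+ #   pnl_potential_bps = (103.0/100.0 - 1) * 10000 = 300.0 bps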
1649
+
1650
+ new_trade_0 = {
1651
+ 'trade_datetime' : lo_datetime,
1652
+ 'timestamp_ms' : lo_timestamp_ms,
1653
+ 'dayofweek' : lo_dayofweek,
1654
+ 'exchange' : exchange.name,
1655
+ 'symbol' : ticker,
1656
+ 'side' : 'buy',
1657
+ 'size' : order_notional_adj_factor*order_notional/lo_close, # in base ccy.
1658
+ 'entry_price' : entry_price,
1659
+ 'target_price' : target_price,
1660
+ 'pnl_potential_bps' : pnl_potential_bps,
1661
+ 'ref_ema_close_fast' : ref_ema_close_fast,
1662
+ 'running_sl_percent_hard' : algo_param['sl_hard_percent'],
1663
+ 'closed' : False,
1664
+ 'reason' : 'entry',
1665
+ 'reason2' : '',
1666
+ 'total_equity' : gloabl_state.total_equity,
1667
+ 'this_ticker_current_position_usdt' : this_ticker_current_position_usdt,
1668
+ 'current_position_usdt' : current_position_usdt,
1669
+ 'running_total_num_positions' : running_total_num_positions,
1670
+ 'cash_before' : cash_before,
1671
+ 'cash_after' : cash_after,
1672
+ 'order_notional' : order_notional,
1673
+ 'commission' : commission,
1674
+ 'max_pain' : 0,
1675
+ 'num_impacting_economic_calendars' : num_impacting_economic_calendars,
1676
+ 'max_camp': max_camp,
1677
+ 'post_move_price_change_percent' : post_move_price_change_percent
1678
+ }
1679
+ all_trades.append(new_trade_0)
1680
+ new_trade_0.update(_additional_trade_fields)
1681
+
1682
+ # Resets!
1683
+ effective_tp_trailing_percent = float('inf')
1684
+ lo_boillenger_lower_breached_history = lo_boillenger_lower_breached_cache.get(key, [])
1685
+ lo_boillenger_lower_breached_history.clear()
1686
+
1687
+ if plot_timeseries:
1688
+ '''
1689
+ https://matplotlib.org/stable/api/_as_gen/matplotlib.axes.Axes.axvline.html
1690
+ linestyle='-' means solid line. If you don't supply linestyle, the vertical line won't show!!!
1691
+ '''
1692
+ all_canvas[f"{key}-param_id{algo_param['param_id']}"]['time_series_canvas'].axvline(x=lo_datetime, color='gray', linewidth=1, linestyle='-')
1693
+ all_canvas[f"{key}-param_id{algo_param['param_id']}"]['time_series_canvas'].scatter([lo_datetime, lo_datetime], [lo_low, lo_high], color='gray')
1694
+
1695
+ elif (
1696
+ algo_param['strategy_mode'] in [ 'short_only', 'long_short']
1697
+ and order_notional_short>0
1698
+ and (not algo_param['block_entries_on_impacting_ecoevents'] or num_impacting_economic_calendars==0)
1699
+ and not block_entry_since_last_sl
1700
+ and (
1701
+ (
1702
+ this_ticker_open_positions_side=='flat'
1703
+ and allow_entry_initial_short
1704
+ and _position_size_and_cash_check(current_position_usdt, this_ticker_current_position_usdt, order_notional_short, gloabl_state.total_equity, target_position_size_percent_total_equity, gloabl_state.cash)
1705
+ ) or (
1706
+ this_ticker_open_positions_side=='sell'
1707
+ and _position_size_and_cash_check(current_position_usdt, this_ticker_current_position_usdt, order_notional_short, gloabl_state.total_equity, target_position_size_percent_total_equity, gloabl_state.cash)
1708
+ and (algo_param['enable_sliced_entry'] and allow_slice_entry_func_result['short'])
1709
+ )
1710
+ )
1711
+ ):
1712
+ # Short
1713
+ order_notional = order_notional_short
1714
+
1715
+ if not reversal_camp_cache[key]['camp1'] and not reversal_camp_cache[key]['camp2'] and not reversal_camp_cache[key]['camp3']:
1716
+ reversal_camp_cache[key]['camp1'] = True
1717
+ reversal_camp_cache[key]['camp1_price'] = lo_close
1718
+ elif reversal_camp_cache[key]['camp1'] and not reversal_camp_cache[key]['camp2'] and not reversal_camp_cache[key]['camp3']:
1719
+ reversal_camp_cache[key]['camp2'] = True
1720
+ reversal_camp_cache[key]['camp2_price'] = lo_close
1721
+ elif reversal_camp_cache[key]['camp1'] and reversal_camp_cache[key]['camp2'] and not reversal_camp_cache[key]['camp3']:
1722
+ reversal_camp_cache[key]['camp3'] = True
1723
+ reversal_camp_cache[key]['camp3_price'] = lo_close
1724
+ reversal_camp_cache[key]['datetime'] = lo_datetime
1725
+
1726
+ fetch_historical_price_func = fetch_price
1727
+ kwargs = {k: v for k, v in locals().items() if k in allow_entry_final_func_params}
1728
+ allow_entry_final_func_result = allow_entry_final_func(**kwargs)
1729
+
1730
+ allow_entry_final_short = allow_entry_final_func_result['short']
1731
+ if (allow_entry_final_short):
1732
+ order_notional_adj_factor = algo_param['dayofweek_adj_map_order_notional'][lo_dayofweek]
1733
+ if order_notional>0 and order_notional_adj_factor>0:
1734
+ max_camp = _max_camp(reversal_camp_cache[key]['camp1'], reversal_camp_cache[key]['camp2'], reversal_camp_cache[key]['camp3'])
1735
+ target_price = allow_entry_final_func_result['target_price_short']
1736
+ reference_price = allow_entry_final_func_result['reference_price']
1737
+ sitting_on_boillenger_band = allow_entry_final_func_result['sitting_on_boillenger_band'] if 'sitting_on_boillenger_band' in allow_entry_final_func_result else None
1738
+ _additional_trade_fields = {k: v for k, v in locals().items() if k in algo_param['additional_trade_fields']}
1739
+
1740
+ commission = order_notional_adj_factor*order_notional * commission_bps / 10000
1741
+ gloabl_state.total_commission += commission
1742
+
1743
+ cash_before = gloabl_state.cash
1744
+ gloabl_state.cash = gloabl_state.cash - order_notional_adj_factor*order_notional - commission
1745
+ cash_after = gloabl_state.cash
1746
+
1747
+ running_total_num_positions : int = len([ 1 for x in all_trades if x['reason']=='entry' and not x['closed']])
1748
+
1749
+ entry_price = allow_entry_final_func_result['entry_price_short']
1750
+ reversal_camp_cache[key]['price'] = entry_price
1751
+
1752
+ pnl_potential_bps = (entry_price/target_price - 1) *10000 if target_price else None
1753
+
1754
+ new_trade_0 = {
1755
+ 'trade_datetime' : lo_datetime,
1756
+ 'timestamp_ms' : lo_timestamp_ms,
1757
+ 'dayofweek' : lo_dayofweek,
1758
+ 'exchange' : exchange.name,
1759
+ 'symbol' : ticker,
1760
+ 'side' : 'sell',
1761
+ 'size' : order_notional_adj_factor*order_notional/lo_close, # in base ccy
1762
+ 'entry_price' : entry_price,
1763
+ 'target_price' : target_price,
1764
+ 'pnl_potential_bps' : pnl_potential_bps,
1765
+ 'ref_ema_close_fast' : ref_ema_close_fast,
1766
+ 'running_sl_percent_hard' : algo_param['sl_hard_percent'],
1767
+ 'closed' : False,
1768
+ 'reason' : 'entry',
1769
+ 'reason2' : '',
1770
+ 'total_equity' : gloabl_state.total_equity,
1771
+ 'this_ticker_current_position_usdt' : this_ticker_current_position_usdt,
1772
+ 'current_position_usdt' : current_position_usdt,
1773
+ 'running_total_num_positions' : running_total_num_positions,
1774
+ 'cash_before' : cash_before,
1775
+ 'cash_after' : cash_after,
1776
+ 'order_notional' : order_notional,
1777
+ 'commission' : commission,
1778
+ 'max_pain' : 0,
1779
+ 'num_impacting_economic_calendars' : num_impacting_economic_calendars,
1780
+ 'max_camp': max_camp,
1781
+ 'post_move_price_change_percent' : post_move_price_change_percent
1782
+ }
1783
+ all_trades.append(new_trade_0)
1784
+ new_trade_0.update(_additional_trade_fields)
1785
+
1786
+ # Resets!
1787
+ effective_tp_trailing_percent = float('inf')
1788
+ lo_boillenger_upper_breached_history = lo_boillenger_upper_breached_cache.get(key, [])
1789
+ lo_boillenger_upper_breached_history.clear()
1790
+
1791
+ if plot_timeseries:
1792
+ '''
1793
+ https://matplotlib.org/stable/api/_as_gen/matplotlib.axes.Axes.axvline.html
1794
+ linestyle='-' means solid line. If you don't supply linestyle, the vertical line won't show!!!
1795
+ '''
1796
+ all_canvas[f"{key}-param_id{algo_param['param_id']}"]['time_series_canvas'].axvline(x=lo_datetime, color='gray', linewidth=1, linestyle='-')
1797
+ all_canvas[f"{key}-param_id{algo_param['param_id']}"]['time_series_canvas'].scatter([lo_datetime, lo_datetime], [lo_low, lo_high], color='gray')
1798
+
1799
+ iter_info = f"param_id: {algo_param['param_id']}, {key} i: {i} {lo_datetime}, # trades: {len(all_trades)}, equity: {round(gloabl_state.total_equity,2)}"
1800
+ if i%100==0 and i%1000!=0:
1801
+ print(iter_info)
1802
+ elif i%1000==0:
1803
+ logger.info(iter_info)
1804
+
1805
+ if i==pd_lo_candles.shape[0]-1:
1806
+ # HC: hard-close any remaining open position at the end of the backtest window
1807
+ if this_ticker_current_position_usdt>0:
1808
+ _close_open_positions(key, ticker, this_ticker_current_position_usdt, this_ticker_open_positions_side, current_position_usdt, unrealized_pnl, None, lo_row, 'HC', '', gloabl_state, all_trades, all_canvas, algo_param)
1809
+
1810
+ sorted_filtered_tickers.clear()
1811
+ sorted_filtered_tickers = None
1812
+
1813
+ if gloabl_state.total_equity<target_order_notional:
1814
+ logger.warning(f"total_equity {gloabl_state.total_equity} < target_order_notional {target_order_notional} exiting prematurely on {lo_datetime}!!!")
1815
+ break
1816
+
1817
+ if plot_timeseries:
1818
+ for exchange in exchanges:
1819
+ for ticker in tickers:
1820
+ key = f"{exchange.name}-{ticker}-param_id{algo_param['param_id']}"
1821
+ canvas = all_canvas[key]
1822
+ canvas['plt'].savefig(f"ts_{key.replace('/','').replace(':','')}.jpg", format='jpg', dpi=300)
1823
+
1824
+ for reference_price_cache_file in reference_price_cache:
1825
+ reference_price_cache[reference_price_cache_file].sort_values("timestamp_ms", inplace=True)
1826
+ reference_price_cache[reference_price_cache_file].to_csv(reference_price_cache_file)
1827
+
1828
+ num_tp = len([ x for x in all_trades if x['reason']=='TP'])
1829
+ num_sl = len([ x for x in all_trades if x['reason']=='SL'])
1830
+ num_hc_tp = len([ x for x in all_trades if x['reason']=='HC' and x['trade_pnl']>0 ] )
1831
+ num_hc_sl = len([ x for x in all_trades if x['reason']=='HC' and x['trade_pnl']<=0 ] )
1832
+ num_hc = num_hc_tp + num_hc_sl
1833
+
1834
+ return {
1835
+ 'realized_pnl' : sum([x['trade_pnl'] for x in all_trades if 'trade_pnl' in x]) - gloabl_state.total_commission,
1836
+ 'total_commission' : gloabl_state.total_commission,
1837
+ 'hit_ratio' : (num_tp + num_hc_tp) / (num_tp + num_sl + num_hc),
1838
+ 'num_tp' : num_tp,
1839
+ 'num_sl' : num_sl,
1840
+ 'num_hc' : num_hc,
1841
+ 'num_entry' : num_tp + num_sl + num_hc,
1842
+ 'trades' : all_trades,
1843
+ 'exceptions' : exceptions
1844
+ }
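+
+ # Hedged worked example of the summary stats above (hypothetical counts):
+ #   num_tp=10, num_sl=5, num_hc_tp=1, num_hc_sl=1 -> num_hc = 2
+ #   hit_ratio = (10 + 1) / (10 + 5 + 2) = 11/17 ~= 0.647
+ #   num_entry = 10 + 5 + 2 = 17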
1845
+
1846
+ def run_all_scenario(
1847
+ algo_params : List[Dict[str, Any]],
1848
+ exchanges : List[Exchange],
1849
+
1850
+ order_notional_adj_func : Callable[..., float],
1851
+ allow_entry_initial_func : Callable[..., bool],
1852
+ allow_entry_final_func : Callable[..., bool],
1853
+ allow_slice_entry_func : Callable[..., bool],
1854
+ sl_adj_func : Callable[..., Dict[str, float]],
1855
+ trailing_stop_threshold_eval_func : Callable[..., Dict[str, float]],
1856
+ pnl_eval_func : Callable[..., Dict[str, float]],
1857
+ tp_eval_func : Callable[..., bool],
1858
+ sort_filter_universe_func : Callable[..., List[str]],
1859
+
1860
+ logger,
1861
+
1862
+ reference_start_dt : datetime = datetime(2021,1,1, tzinfo=timezone.utc),
1863
+ ) -> List[Dict]:
1864
+ all_exceptions = []
1865
+
1866
+ start = datetime.now()
1867
+ max_test_end_date = start
1868
+
1869
+ economic_calendars_file = algo_params[0]['economic_calendars_file']
1870
+ ecoevents_mapped_regions = algo_params[0]['ecoevents_mapped_regions']
1871
+ pd_economic_calendars = None
1872
+ economic_calendars_loaded : bool = False
1873
+ if os.path.isfile(economic_calendars_file):
1874
+ pd_economic_calendars = pd.read_csv(economic_calendars_file)
1875
+ pd_economic_calendars = pd_economic_calendars[pd_economic_calendars.region.isin(ecoevents_mapped_regions)]
1876
+ economic_calendars_loaded = True if pd_economic_calendars.shape[0]>0 else False
1877
+
1878
+ i : int = 1
1879
+ algo_results : List[Dict] = []
1880
+ best_realized_pnl, best_algo_result = 0, None
1881
+ for algo_param in algo_params:
1882
+
1883
+ algo_result : Dict = {
1884
+ 'param' : algo_param
1885
+ }
1886
+ algo_results.append(algo_result)
1887
+
1888
+ # We calc test_end_date with 'lo' (not with 'hi'); we assume it'd be the same.
1889
+ test_start_date = algo_param['start_date']
1890
+ test_fetch_start_date = test_start_date
1891
+ lo_candle_size = algo_param['lo_candle_size']
1892
+ lo_num_intervals = int(lo_candle_size[:-1]) # e.g. '15m' -> 15
1893
+ lo_interval = lo_candle_size[-1]
1894
+ lo_how_many_candles = algo_param['lo_how_many_candles']
1895
+ if lo_interval=="m":
1896
+ test_end_date = test_start_date + timedelta(minutes=lo_num_intervals*lo_how_many_candles)
1897
+ test_fetch_start_date = test_fetch_start_date - timedelta(minutes=algo_param['lo_stats_computed_over_how_many_candles']*2)
1898
+ test_end_date_ref = test_end_date + timedelta(minutes=algo_param['lo_stats_computed_over_how_many_candles']*4)
1899
+ elif lo_interval=="h":
1900
+ test_end_date = test_start_date + timedelta(hours=lo_num_intervals*lo_how_many_candles)
1901
+ test_fetch_start_date = test_fetch_start_date - timedelta(hours=algo_param['lo_stats_computed_over_how_many_candles']*2)
1902
+ test_end_date_ref = test_end_date + timedelta(hours=algo_param['lo_stats_computed_over_how_many_candles']*4)
1903
+ elif lo_interval=="d":
1904
+ test_end_date = test_start_date + timedelta(days=lo_num_intervals*lo_how_many_candles)
1905
+ test_fetch_start_date = test_fetch_start_date - timedelta(days=algo_param['lo_stats_computed_over_how_many_candles']*2)
1906
+ test_end_date_ref = test_end_date + timedelta(days=algo_param['lo_stats_computed_over_how_many_candles']*4)
1907
+ test_end_date = test_end_date if test_end_date < max_test_end_date else max_test_end_date
1908
+ test_end_date_ref = test_end_date_ref if test_end_date_ref < max_test_end_date else max_test_end_date
1909
+ cutoff_ts = int(test_fetch_start_date.timestamp()) # in seconds
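+
+ # A hedged worked example of the window arithmetic above (hypothetical parameter values):
+ #   lo_candle_size='1h', lo_how_many_candles=2000, lo_stats_computed_over_how_many_candles=168
+ #     test_end_date         = start_date + 1*2000 hours
+ #     test_fetch_start_date = start_date - 168*2 hours   (extra history so indicators are warm by start_date)
+ #     test_end_date_ref     = test_end_date + 168*4 hours
+ #   Both end dates are then clamped to max_test_end_date, and cutoff_ts is the fetch start in epoch seconds.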
1910
+
1911
+
1912
+ ####################################### STEP 1. Fetch candles (Because each test may have a different test_end_date, you need to re-fetch candles for each algo_param) #######################################
1913
+ '''
1914
+ cutoff_ts in seconds, example '1668135382'
1915
+
1916
+ exchanges[0].fetch_ohlcv('ETHUSDT', "1m", cutoff_ts)
1917
+
1918
+ Candles format, first field is timestamp in ms:
1919
+ [
1920
+ [1502942400000, 301.13, 301.13, 301.13, 301.13, 0.42643],
1921
+ [1502942460000, 301.13, 301.13, 301.13, 301.13, 2.75787],
1922
+ [1502942520000, 300.0, 300.0, 300.0, 300.0, 0.0993],
1923
+ [1502942580000, 300.0, 300.0, 300.0, 300.0, 0.31389],
1924
+ ...
1925
+ ]
1926
+ '''
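+
+ # For reference, a minimal sketch of turning that raw list into a DataFrame (fetch_candles /
+ # fix_column_types presumably do the equivalent internally; the column names are an assumption):
+ #
+ #     raw = [[1502942400000, 301.13, 301.13, 301.13, 301.13, 0.42643],
+ #            [1502942460000, 301.13, 301.13, 301.13, 301.13, 2.75787]]
+ #     df = pd.DataFrame(raw, columns=['timestamp_ms', 'open', 'high', 'low', 'close', 'volume'])
+ #     df['datetime'] = pd.to_datetime(df['timestamp_ms'], unit='ms', utc=True)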
1927
+ delisted : List[str] = []
1928
+
1929
+ data_fetch_start : float = time.time()
1930
+
1931
+ # Fetch BTC
1932
+ reference_ticker : str = algo_param['reference_ticker']
1933
+ target_candle_file_name_fast : str = f'{reference_ticker.replace("^","").replace("/","").replace(":","")}_fast_candles_{datetime(2021,1,1, tzinfo=timezone.utc).strftime("%Y-%m-%d-%H-%M-%S")}_{test_end_date_ref.strftime("%Y-%m-%d-%H-%M-%S")}_1d.csv'
1934
+ target_candle_file_name_slow : str = f'{reference_ticker.replace("^","").replace("/","").replace(":","")}_slow_candles_{datetime(2021,1,1, tzinfo=timezone.utc).strftime("%Y-%m-%d-%H-%M-%S")}_{test_end_date_ref.strftime("%Y-%m-%d-%H-%M-%S")}_1d.csv'
1935
+ logger.info(f"reference_ticker: {reference_ticker}, target_candle_file_name_fast: {target_candle_file_name_fast}, target_candle_file_name_slow: {target_candle_file_name_slow}, reference_candles_file: {algo_param['reference_candles_file'] if 'reference_candles_file' in algo_param else '---'}")
1936
+ if algo_param['force_reload'] or not os.path.isfile(target_candle_file_name_fast):
1937
+ if algo_param['force_reload'] and 'reference_candles_file' in algo_param and algo_param['reference_candles_file'] and os.path.isfile(algo_param['reference_candles_file']):
1938
+ pd_ref_candles_fast = pd.read_csv(algo_param['reference_candles_file'])
1939
+ pd_ref_candles_slow : pd.DataFrame = pd_ref_candles_fast.copy(deep=True)
1940
+ logger.info(f"reference candles loaded from {algo_param['reference_candles_file']}")
1941
+
1942
+ else:
1943
+ ref_candles : Dict[str, pd.DataFrame] = fetch_candles(
1944
+ start_ts=int(reference_start_dt.timestamp()),
1945
+ end_ts=int(test_end_date_ref.timestamp()),
1946
+ exchange=exchanges[0],
1947
+ normalized_symbols=[reference_ticker],
1948
+ candle_size = '1d',
1949
+ num_candles_limit=algo_param['num_candles_limit'],
1950
+ logger=logger,
1951
+ cache_dir=algo_param['cache_candles'],
1952
+ list_ts_field=exchanges[0].options['list_ts_field'] if 'list_ts_field' in exchanges[0].options else None
1953
+ )
1954
+ logger.info(f"Reference candles fetched: {reference_ticker}, start: {reference_start_dt}, end: {test_end_date_ref}")
1955
+ pd_ref_candles_fast : pd.DataFrame = ref_candles[reference_ticker]
1956
+ pd_ref_candles_slow : pd.DataFrame = pd_ref_candles_fast.copy(deep=True)
1957
+
1958
+ compute_candles_stats(pd_candles=pd_ref_candles_fast, boillenger_std_multiples=2, sliding_window_how_many_candles=algo_param['ref_ema_num_days_fast'], slow_fast_interval_ratio=int(algo_param['ref_ema_num_days_fast']/2), rsi_sliding_window_how_many_candles=algo_param['rsi_sliding_window_how_many_candles'], rsi_trend_sliding_window_how_many_candles=algo_param['rsi_trend_sliding_window_how_many_candles'], hurst_exp_window_how_many_candles=algo_param['hurst_exp_window_how_many_candles'], target_fib_level=algo_param['target_fib_level'], pypy_compat=algo_param['pypy_compat'])
1959
+ compute_candles_stats(pd_candles=pd_ref_candles_slow, boillenger_std_multiples=2, sliding_window_how_many_candles=algo_param['ref_ema_num_days_slow'], slow_fast_interval_ratio=int(algo_param['ref_ema_num_days_slow']/2), rsi_sliding_window_how_many_candles=algo_param['rsi_sliding_window_how_many_candles'], rsi_trend_sliding_window_how_many_candles=algo_param['rsi_trend_sliding_window_how_many_candles'], hurst_exp_window_how_many_candles=algo_param['hurst_exp_window_how_many_candles'], target_fib_level=algo_param['target_fib_level'], pypy_compat=algo_param['pypy_compat'])
1960
+ logger.info(f"Reference candles {reference_ticker} compute_candles_stats done.")
1961
+
1962
+ pd_ref_candles_fast.to_csv(target_candle_file_name_fast)
1963
+ pd_ref_candles_slow.to_csv(target_candle_file_name_slow)
1964
+
1965
+ else:
1966
+ pd_ref_candles_fast : pd.DataFrame = pd.read_csv(target_candle_file_name_fast)
1967
+ pd_ref_candles_slow : pd.DataFrame = pd.read_csv(target_candle_file_name_slow)
1968
+ fix_column_types(pd_ref_candles_fast)
1969
+ fix_column_types(pd_ref_candles_slow)
1970
+ logger.info(f"Reference candles {reference_ticker} loaded from target_candle_file_name_fast: {target_candle_file_name_fast}, target_candle_file_name_slow: {target_candle_file_name_slow}")
1971
+
1972
+ total_seconds = (test_end_date_ref - test_start_date).total_seconds()
1973
+ total_hours = total_seconds / 3600
1974
+ total_days = total_hours / 24
1975
+ sliding_window_how_many_candles : int = 0
1976
+ sliding_window_how_many_candles = int(total_days / algo_param['sliding_window_ratio'])
1977
+
1978
+ ref_candles_partitions, pd_hi_candles_partitions, pd_lo_candles_partitions = None, None, None
1979
+ if not algo_param['pypy_compat']:
1980
+ ref_candles_partitions = partition_sliding_window(
1981
+ pd_candles = pd_ref_candles_fast,
1982
+ sliding_window_how_many_candles = sliding_window_how_many_candles,
1983
+ smoothing_window_size_ratio = algo_param['smoothing_window_size_ratio'],
1984
+ linregress_stderr_threshold = algo_param['linregress_stderr_threshold'],
1985
+ max_recur_depth = algo_param['max_recur_depth'],
1986
+ min_segment_size_how_many_candles = algo_param['min_segment_size_how_many_candles'],
1987
+ segment_consolidate_slope_ratio_threshold = algo_param['segment_consolidate_slope_ratio_threshold'],
1988
+ sideway_price_condition_threshold = algo_param['sideway_price_condition_threshold']
1989
+ )
1990
+ candle_segments_jpg_file_name : str = f'{reference_ticker.replace("^","").replace("/","").replace(":","")}_refcandles_w_segments_{datetime(2021,1,1, tzinfo=timezone.utc).strftime("%Y-%m-%d-%H-%M-%S")}_{test_end_date_ref.strftime("%Y-%m-%d-%H-%M-%S")}_1d.jpg'
1991
+ plot_segments(pd_ref_candles_fast, ref_candles_partitions, candle_segments_jpg_file_name)
1992
+
1993
+ candle_segments_file_name : str = f'{reference_ticker.replace("^","").replace("/","").replace(":","")}_refcandles_w_segments_{datetime(2021,1,1, tzinfo=timezone.utc).strftime("%Y-%m-%d-%H-%M-%S")}_{test_end_date_ref.strftime("%Y-%m-%d-%H-%M-%S")}_1d.csv'
1994
+ pd_ref_candles_segments = segments_to_df(ref_candles_partitions['segments'])
1995
+ pd_ref_candles_segments.to_csv(candle_segments_file_name)
1996
+
1997
+ all_exchange_candles : Dict[str, Dict[str, Dict[str, pd.DataFrame]]] = {}
1998
+ for exchange in exchanges:
1999
+ markets = exchange.load_markets()
2000
+ if exchange.name not in all_exchange_candles:
2001
+ all_exchange_candles[exchange.name] = {}
2002
+
2003
+ if algo_param['white_list_tickers']:
2004
+ tickers = algo_param['white_list_tickers']
2005
+ else:
2006
+ tickers = list(markets.keys())
2007
+
2008
+ for ticker in tickers:
2009
+ if ticker not in markets:
2010
+ err_msg = f"{ticker}: no longer in markets"
2011
+ logger.error(err_msg)
2012
+ delisted.append(ticker)
2013
+ else:
2014
+ all_exchange_candles[exchange.name][ticker] = {}
2015
+
2016
+ _ticker = ticker.split(":")[0].replace("/","")
2017
+ total_seconds = (test_end_date - test_fetch_start_date).total_seconds()
2018
+ total_hours = total_seconds / 3600
2019
+ total_days = total_hours / 24
2020
+ sliding_window_how_many_candles : int = 0
2021
+ sliding_window_how_many_candles = int(total_days / algo_param['sliding_window_ratio'])
2022
+
2023
+ pd_hi_candles = None
2024
+ target_candle_file_name : str = f'{_ticker}_candles_{test_fetch_start_date.strftime("%Y-%m-%d-%H-%M-%S")}_{test_end_date.strftime("%Y-%m-%d-%H-%M-%S")}_{algo_param["hi_candle_size"]}.csv'
2025
+ if algo_param['force_reload'] or not os.path.isfile(target_candle_file_name):
2026
+ if algo_param['force_reload'] and 'hi_candles_file' in algo_param and algo_param['hi_candles_file'] and os.path.isfile(algo_param['hi_candles_file']):
2027
+ pd_hi_candles : pd.DataFrame = pd.read_csv(algo_param['hi_candles_file'])
2028
+
2029
+ else:
2030
+ hi_candles : Dict[str, pd.DataFrame] = fetch_candles(
2031
+ start_ts=cutoff_ts,
2032
+ end_ts=int(test_end_date.timestamp()),
2033
+ exchange=exchange, normalized_symbols=[ ticker ],
2034
+ candle_size = algo_param['hi_candle_size'],
2035
+ num_candles_limit=algo_param['num_candles_limit'],
2036
+ logger=logger,
2037
+ cache_dir=algo_param['cache_candles'],
2038
+ list_ts_field=exchange.options['list_ts_field']
2039
+ )
2040
+ pd_hi_candles : pd.DataFrame = hi_candles[ticker]
2041
+ logger.info(f"pd_hi_candles fetched: {ticker} {pd_hi_candles.shape}, start: {cutoff_ts}, end: {int(test_end_date.timestamp())}")
2042
+ compute_candles_stats(pd_candles=pd_hi_candles, boillenger_std_multiples=algo_param['boillenger_std_multiples'], sliding_window_how_many_candles=algo_param['hi_stats_computed_over_how_many_candles'], slow_fast_interval_ratio=(algo_param['hi_stats_computed_over_how_many_candles']/algo_param['hi_ma_short_interval']), rsi_sliding_window_how_many_candles=algo_param['rsi_sliding_window_how_many_candles'], rsi_trend_sliding_window_how_many_candles=algo_param['rsi_trend_sliding_window_how_many_candles'], hurst_exp_window_how_many_candles=algo_param['hurst_exp_window_how_many_candles'], target_fib_level=algo_param['target_fib_level'], pypy_compat=algo_param['pypy_compat'])
2043
+ logger.info(f"pd_hi_candles {ticker} compute_candles_stats done: {target_candle_file_name}")
2044
+ pd_hi_candles.to_csv(target_candle_file_name)
2045
+
2046
+ if pd_hi_candles is not None and pd_hi_candles.shape[0]>0:
2047
+ first_candle_datetime = datetime.fromtimestamp(pd_hi_candles.iloc[0]['timestamp_ms']/1000)
2048
+ last_candle_datetime = datetime.fromtimestamp(pd_hi_candles.iloc[-1]['timestamp_ms']/1000)
2049
+
2050
+ assert(last_candle_datetime>first_candle_datetime)
2051
+ else:
2052
+ err_msg = f"{ticker} no hi candles?"
2053
+ logger.error(err_msg)
2054
+ else:
2055
+ pd_hi_candles : pd.DataFrame = pd.read_csv(target_candle_file_name)
2056
+ fix_column_types(pd_hi_candles)
2057
+ logger.info(f"pd_hi_candles {ticker} {pd_hi_candles.shape} loaded from {target_candle_file_name}")
2058
+
2059
+ if not algo_param['pypy_compat']:
2060
+ pd_hi_candles_partitions = partition_sliding_window(
2061
+ pd_candles = pd_hi_candles,
2062
+ sliding_window_how_many_candles = sliding_window_how_many_candles,
2063
+ smoothing_window_size_ratio = algo_param['smoothing_window_size_ratio'],
2064
+ linregress_stderr_threshold = algo_param['linregress_stderr_threshold'],
2065
+ max_recur_depth = algo_param['max_recur_depth'],
2066
+ min_segment_size_how_many_candles = algo_param['min_segment_size_how_many_candles'],
2067
+ segment_consolidate_slope_ratio_threshold = algo_param['segment_consolidate_slope_ratio_threshold'],
2068
+ sideway_price_condition_threshold = algo_param['sideway_price_condition_threshold']
2069
+ )
2070
+ candle_segments_jpg_file_name : str = f'{_ticker}_hicandles_w_segments_{test_fetch_start_date.strftime("%Y-%m-%d-%H-%M-%S")}_{test_end_date.strftime("%Y-%m-%d-%H-%M-%S")}_{algo_param["hi_candle_size"]}.jpg'
2071
+ plot_segments(pd_hi_candles, pd_hi_candles_partitions, candle_segments_jpg_file_name)
2072
+
2073
+ candle_segments_file_name : str = f'{_ticker}_hicandles_w_segments_{test_fetch_start_date.strftime("%Y-%m-%d-%H-%M-%S")}_{test_end_date.strftime("%Y-%m-%d-%H-%M-%S")}_{algo_param["hi_candle_size"]}.csv'
2074
+ pd_hi_candles_segments = segments_to_df(pd_hi_candles_partitions['segments'])
2075
+ pd_hi_candles_segments.to_csv(candle_segments_file_name)
2076
+
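+ # Lo timeframe candles: same cache-or-fetch + TA pipeline as the hi candles above, driven by the lo_* parameters.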
2077
+ pd_lo_candles = None
2078
+ _ticker = ticker.split(":")[0].replace("/","")
2079
+ target_candle_file_name : str = f'{_ticker}_candles_{test_fetch_start_date.strftime("%Y-%m-%d-%H-%M-%S")}_{test_end_date.strftime("%Y-%m-%d-%H-%M-%S")}_{algo_param["lo_candle_size"]}.csv'
2080
+ if algo_param['force_reload'] or not os.path.isfile(target_candle_file_name):
2081
+ if algo_param['force_reload'] and 'lo_candles_file' in algo_param and algo_param['lo_candles_file'] and os.path.isfile(algo_param['lo_candles_file']):
2082
+ pd_lo_candles : pd.DataFrame = pd.read_csv(algo_param['lo_candles_file'])
2083
+
2084
+ else:
2085
+ lo_candles : Dict[str, pd.DataFrame] = fetch_candles(
2086
+ start_ts=cutoff_ts,
2087
+ end_ts=int(test_end_date.timestamp()),
2088
+ exchange=exchange, normalized_symbols=[ ticker ],
2089
+ candle_size = algo_param['lo_candle_size'],
2090
+ num_candles_limit=algo_param['num_candles_limit'],
2091
+ logger=logger,
2092
+ cache_dir=algo_param['cache_candles'],
2093
+ list_ts_field=exchange.options['list_ts_field']
2094
+ )
2095
+ pd_lo_candles : pd.DataFrame = lo_candles[ticker]
2096
+ logger.info(f"pd_lo_candles fetched: {ticker} {pd_lo_candles.shape}, start: {cutoff_ts}, end: {int(test_end_date.timestamp())}")
2097
+ compute_candles_stats(pd_candles=pd_lo_candles, boillenger_std_multiples=algo_param['boillenger_std_multiples'], sliding_window_how_many_candles=algo_param['lo_stats_computed_over_how_many_candles'], slow_fast_interval_ratio=(algo_param['lo_stats_computed_over_how_many_candles']/algo_param['lo_ma_short_interval']), rsi_sliding_window_how_many_candles=algo_param['rsi_sliding_window_how_many_candles'], rsi_trend_sliding_window_how_many_candles=algo_param['rsi_trend_sliding_window_how_many_candles'], hurst_exp_window_how_many_candles=algo_param['hurst_exp_window_how_many_candles'], target_fib_level=algo_param['target_fib_level'], pypy_compat=algo_param['pypy_compat'])
2098
+ logger.info(f"pd_lo_candles {ticker} compute_candles_stats done. {target_candle_file_name}")
2099
+ pd_lo_candles.to_csv(target_candle_file_name)
2100
+
2101
+ if pd_lo_candles is not None and pd_lo_candles.shape[0]>0:
2102
+ first_candle_datetime = datetime.fromtimestamp(pd_lo_candles.iloc[0]['timestamp_ms']/1000)
2103
+ last_candle_datetime = datetime.fromtimestamp(pd_lo_candles.iloc[-1]['timestamp_ms']/1000)
2104
+
2105
+ assert(last_candle_datetime>first_candle_datetime)
2106
+ else:
2107
+ err_msg = f"No lo candles for {ticker}"
2108
+ logger.error(err_msg)
2109
+ else:
2110
+ pd_lo_candles : pd.DataFrame = pd.read_csv(target_candle_file_name)
2111
+ fix_column_types(pd_lo_candles)
2112
+ logger.info(f"pd_lo_candles {ticker} {pd_lo_candles.shape} loaded from {target_candle_file_name}")
2113
+
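+ # Same segmentation, plot and CSV dump for the lo timeframe candles.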
2114
+ if not algo_param['pypy_compat']:
2115
+ pd_lo_candles_partitions = partition_sliding_window(
2116
+ pd_candles = pd_lo_candles,
2117
+ sliding_window_how_many_candles = sliding_window_how_many_candles,
2118
+ smoothing_window_size_ratio = algo_param['smoothing_window_size_ratio'],
2119
+ linregress_stderr_threshold = algo_param['linregress_stderr_threshold'],
2120
+ max_recur_depth = algo_param['max_recur_depth'],
2121
+ min_segment_size_how_many_candles = algo_param['min_segment_size_how_many_candles'],
2122
+ segment_consolidate_slope_ratio_threshold = algo_param['segment_consolidate_slope_ratio_threshold'],
2123
+ sideway_price_condition_threshold = algo_param['sideway_price_condition_threshold']
2124
+ )
2125
+ candle_segments_jpg_file_name : str = f'{_ticker}_locandles_w_segments_{test_fetch_start_date.strftime("%Y-%m-%d-%H-%M-%S")}_{test_end_date.strftime("%Y-%m-%d-%H-%M-%S")}_{algo_param["lo_candle_size"]}.jpg'
2126
+ plot_segments(pd_lo_candles, pd_lo_candles_partitions, candle_segments_jpg_file_name)
2127
+
2128
+ candle_segments_file_name : str = f'{_ticker}_locandles_w_segments_{test_fetch_start_date.strftime("%Y-%m-%d-%H-%M-%S")}_{test_end_date.strftime("%Y-%m-%d-%H-%M-%S")}_{algo_param["lo_candle_size"]}.csv'
2129
+ pd_lo_candles_segments = segments_to_df(pd_lo_candles_partitions['segments'])
2130
+ pd_lo_candles_segments.to_csv(candle_segments_file_name)
2131
+
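+ # Keep both timeframes per exchange/ticker for the simulation step.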
2132
+ all_exchange_candles[exchange.name][ticker]['hi_candles'] = pd_hi_candles
2133
+ all_exchange_candles[exchange.name][ticker]['lo_candles'] = pd_lo_candles
2134
+
2135
+ data_fetch_finish : float = time.time()
2136
+
2137
+ ####################################### STEP 2. Trade simulation #######################################
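+ # run_scenario replays the prepared candles through the strategy callbacks (entry/exit filters, sizing,
+ # SL/TP and trailing-stop hooks passed below) and returns the trades plus realized pnl, commission,
+ # hit ratio, counters and any exceptions.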
2138
+ logger.info(f"Start run_scenario")
2139
+ scenario_start : float = time.time()
2140
+ result = run_scenario(
2141
+ algo_param=algo_param,
2142
+ exchanges=exchanges,
2143
+ all_exchange_candles=all_exchange_candles,
2144
+ pd_ref_candles_fast=pd_ref_candles_fast,
2145
+ pd_ref_candles_slow=pd_ref_candles_slow,
2146
+ ref_candles_partitions=ref_candles_partitions,
2147
+ pd_hi_candles_partitions=pd_hi_candles_partitions,
2148
+ pd_lo_candles_partitions=pd_lo_candles_partitions,
2149
+ economic_calendars_loaded=economic_calendars_loaded,
2150
+ pd_economic_calendars=pd_economic_calendars,
2151
+ tickers=tickers,
2152
+
2153
+ order_notional_adj_func=order_notional_adj_func,
2154
+ allow_entry_initial_func=allow_entry_initial_func,
2155
+ allow_entry_final_func=allow_entry_final_func,
2156
+ allow_slice_entry_func=allow_slice_entry_func,
2157
+ sl_adj_func=sl_adj_func,
2158
+ trailing_stop_threshold_eval_func=trailing_stop_threshold_eval_func,
2159
+ pnl_eval_func=pnl_eval_func,
2160
+ tp_eval_func=tp_eval_func,
2161
+ sort_filter_universe_func=sort_filter_universe_func,
2162
+
2163
+ logger=logger,
2164
+
2165
+ pypy_compat=algo_param['pypy_compat'],
2166
+ plot_timeseries=True
2167
+ )
2168
+ scenario_finish = time.time()
2169
+
2170
+ data_fetch_elapsed_ms = (data_fetch_finish - data_fetch_start) * 1000
2171
+ scenario_elapsed_ms = (scenario_finish - scenario_start) * 1000
2172
+
2173
+ logger.info(f"Done run_scenario. data_fetch_elapsed_ms: {data_fetch_elapsed_ms} ms, scenario_elapsed_ms: {scenario_elapsed_ms}")
2174
+
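+ # Flatten this scenario into a one-row summary: key input parameters plus the realized pnl / hit-ratio outputs.
+ # The individual trades are kept separately under algo_result['orders'].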
2175
+ algo_result['orders'] = result['trades']
2176
+ result.pop('trades')
2177
+ algo_result['summary'] = {
2178
+ # Key parameters
2179
+ 'initial_cash' : algo_param['initial_cash'],
2180
+ 'entry_percent_initial_cash' : algo_param['entry_percent_initial_cash'],
2181
+ 'strategy_mode' : algo_param['strategy_mode'],
2182
+ 'ref_ema_num_days_fast' : algo_param['ref_ema_num_days_fast'],
2183
+ 'ref_ema_num_days_slow' : algo_param['ref_ema_num_days_slow'],
2184
+ 'long_above_ref_ema_short_below' : algo_param['long_above_ref_ema_short_below'],
2185
+ 'ref_price_vs_ema_percent_threshold' : algo_param['ref_price_vs_ema_percent_threshold'] if 'ref_price_vs_ema_percent_threshold' in algo_param else None,
2186
+ 'rsi_upper_threshold' : algo_param['rsi_upper_threshold'],
2187
+ 'rsi_lower_threshold' : algo_param['rsi_lower_threshold'],
2188
+ 'boillenger_std_multiples' : algo_param['boillenger_std_multiples'],
2189
+ 'ema_short_slope_threshold' : algo_param['ema_short_slope_threshold'] if 'ema_short_slope_threshold' in algo_param else None,
2190
+ 'num_intervals_block_pending_ecoevents' : algo_param['num_intervals_block_pending_ecoevents'],
2191
+ 'num_intervals_current_ecoevents' : algo_param['num_intervals_current_ecoevents'],
2192
+ 'sl_hard_percent' : algo_param['sl_hard_percent'],
2193
+ 'sl_percent_trailing' : algo_param['sl_percent_trailing'],
2194
+ 'use_gradual_tightened_trailing_stops' : algo_param['use_gradual_tightened_trailing_stops'],
2195
+ 'sl_num_intervals_delay' : algo_param['sl_num_intervals_delay'],
2196
+ 'tp_min_percent' : algo_param['tp_min_percent'],
2197
+ 'tp_max_percent' : algo_param['tp_max_percent'],
2198
+ 'asymmetric_tp_bps' : algo_param['asymmetric_tp_bps'],
2199
+
2200
+ # Key output
2201
+ 'realized_pnl' : result['realized_pnl'], # Commission already taken out
2202
+ 'total_commission' : result['total_commission'],
2203
+ 'hit_ratio' : result['hit_ratio'],
2204
+ 'num_tp' : result['num_tp'],
2205
+ 'num_sl' : result['num_sl'],
2206
+ 'num_hc' : result['num_hc'],
2207
+ 'num_entry' : result['num_entry'],
2208
+ 'data_fetch_elapsed_ms' : data_fetch_elapsed_ms,
2209
+ 'scenario_elapsed_ms' : scenario_elapsed_ms,
2210
+ 'num_exceptions' : len(result['exceptions'])
2211
+ }
2212
+
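+ # Collect any per-ticker exceptions surfaced by run_scenario; only the count goes into the summary above.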
2213
+ all_exceptions = all_exceptions + list(result['exceptions'].items())
2214
+ logger.error(list(result['exceptions'].items()))
2215
+
2216
+ logger.info(f"Done ({i}/{len(algo_params)}) {algo_param['name_exclude_start_date']}")
2217
+ logger.info(json.dumps(algo_result['summary'], indent=4))
2218
+
2219
+ if result['realized_pnl']>best_realized_pnl or not best_algo_result:
2220
+ best_algo_result = algo_result['summary']
+ best_realized_pnl = result['realized_pnl']
2221
+
2222
+ i = i + 1
2223
+
2224
+ finish = datetime.now()
2225
+ elapsed = int((finish-start).total_seconds())
2226
+
2227
+
2228
+ logger.info(f"Backtest done in {elapsed}sec over {len(algo_params)} scenario's with start_date {test_start_date} over {len(exchanges)} exchange(s) and {len(tickers)} tickers.")
2229
+
2230
+ logger.info(f"*** Best result realized_pnl: {best_algo_result['realized_pnl']}")
2231
+ logger.info(json.dumps(best_algo_result, indent=4))
2232
+
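+ # Aggregate every scenario summary into a DataFrame and append an 'avg' row for quick cross-scenario comparison.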
2233
+ pd_results = pd.DataFrame([ x['summary'] for x in algo_results])
2234
+ pd_results.loc['avg', 'realized_pnl'] = pd_results['realized_pnl'].mean()
2235
+ pd_results.loc['avg', 'total_commission'] = pd_results['total_commission'].mean()
2236
+ pd_results.loc['avg', 'hit_ratio'] = pd_results['hit_ratio'].mean()
2237
+
2238
+ return algo_results
2239
+
2240
+ def parseargs():
2241
+ parser = argparse.ArgumentParser()
2242
+ parser.add_argument("--force_reload", help="Reload candles? Both candles and TA previously computed will be loaded from disk. Y or N (default)", default=False)
2243
+ parser.add_argument("--white_list_tickers", help="Comma seperated list, example: BTC/USDT:USDT,ETH/USDT:USDT,XRP/USDT:USDT ", default="BTC/USDT:USDT")
2244
+ parser.add_argument("--reference_ticker", help="This is ticker for bull / bear determination. The Northstar.", default="BTC/USDT:USDT")
2245
+ parser.add_argument("--block_entries_on_impacting_ecoevents", help="Block entries on economic event? Y (default) or N", default=True)
2246
+ parser.add_argument("--enable_sliced_entry", help="Block entries on economic event? Y or N (default)", default=False)
2247
+ parser.add_argument("--asymmetric_tp_bps", help="A positive asymmetric_tp_bps means you are taking deeper TPs. A negative asymmetric_tp_bps means shallower", default=0)
2248
+ args = parser.parse_args()
2249
+
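+ # Note: the repetitive Y/N handling below could be collapsed into a small helper (sketch only, not part of this module):
+ # def parse_yn(v, default: bool) -> bool:
+ #     return default if isinstance(v, bool) or v is None else str(v).strip().upper() == 'Y'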
2250
+ if args.force_reload:
2251
+ if args.force_reload=='Y':
2252
+ force_reload = True
2253
+ else:
2254
+ force_reload = False
2255
+ else:
2256
+ force_reload = False
2257
+
2258
+ if args.white_list_tickers:
2259
+ white_list_tickers = args.white_list_tickers.split(',')
2260
+
2261
+ reference_ticker = args.reference_ticker if args.reference_ticker else white_list_tickers[0]
2262
+
2263
+ if args.block_entries_on_impacting_ecoevents:
2264
+ if args.block_entries_on_impacting_ecoevents=='Y':
2265
+ block_entries_on_impacting_ecoevents = True
2266
+ else:
2267
+ block_entries_on_impacting_ecoevents = False
2268
+ else:
2269
+ block_entries_on_impacting_ecoevents = True
2270
+
2271
+ if args.enable_sliced_entry:
2272
+ if args.enable_sliced_entry=='Y':
2273
+ enable_sliced_entry = True
2274
+ else:
2275
+ enable_sliced_entry = False
2276
+ else:
2277
+ enable_sliced_entry = False
2278
+
2279
+ asymmetric_tp_bps = int(args.asymmetric_tp_bps)
2280
+
2281
+ return {
2282
+ 'force_reload': force_reload,
2283
+ 'white_list_tickers' : white_list_tickers,
2284
+ 'reference_ticker' : reference_ticker,
2285
+ 'block_entries_on_impacting_ecoevents' : block_entries_on_impacting_ecoevents,
2286
+ 'enable_sliced_entry' : enable_sliced_entry,
2287
+ 'asymmetric_tp_bps' : asymmetric_tp_bps
2288
+ }
2289
+
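+ # Example invocation of the arguments parsed above (script name is a placeholder; flag values are illustrative only):
+ # python <this_script>.py --force_reload Y --white_list_tickers BTC/USDT:USDT,ETH/USDT:USDT --block_entries_on_impacting_ecoevents Y --asymmetric_tp_bps -25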
2290
+ def dump_trades_to_disk(
2291
+ algo_results,
2292
+ filename,
2293
+ logger
2294
+ ):
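+ # Flatten every order from every scenario into one row each, stamping the scenario parameters onto the row,
+ # then write a single CSV trade extract.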
2295
+ flattened_trades : List[Dict[str, Any]] = []
2296
+ for algo_result in algo_results:
2297
+ for order in algo_result['orders']:
2298
+ try:
2299
+ order['name'] = algo_result['param']['name']
2300
+ order['name_exclude_start_date'] = algo_result['param']['name_exclude_start_date']
2301
+
2302
+ order['initial_cash'] = algo_result['param']['initial_cash']
2303
+ order['entry_percent_initial_cash'] = algo_result['param']['entry_percent_initial_cash']
2304
+ order['clip_order_notional_to_best_volumes'] = algo_result['param']['clip_order_notional_to_best_volumes']
2305
+ order['target_position_size_percent_total_equity'] = algo_result['param']['target_position_size_percent_total_equity']
2306
+
2307
+ order['reference_ticker'] = algo_result['param']['reference_ticker']
2308
+ order['strategy_mode'] = algo_result['param']['strategy_mode']
2309
+ order['boillenger_std_multiples'] = algo_result['param']['boillenger_std_multiples']
2310
+ order['ema_short_slope_threshold'] = algo_result['param']['ema_short_slope_threshold'] if 'ema_short_slope_threshold' in algo_result['param'] else None
2311
+ order['how_many_last_candles'] = algo_result['param']['how_many_last_candles']
2312
+ order['last_candles_timeframe'] = algo_result['param']['last_candles_timeframe']
2313
+ order['enable_wait_entry'] = algo_result['param']['enable_wait_entry'] if 'enable_wait_entry' in algo_result['param'] else None
2314
+ order['allow_entry_sit_bb'] = algo_result['param']['allow_entry_sit_bb'] if 'allow_entry_sit_bb' in algo_result['param'] else None
2315
+ order['enable_sliced_entry'] = algo_result['param']['enable_sliced_entry']
2316
+ order['adj_sl_on_ecoevents'] = algo_result['param']['adj_sl_on_ecoevents']
2317
+ order['block_entries_on_impacting_ecoevents'] = algo_result['param']['block_entries_on_impacting_ecoevents']
2318
+ order['num_intervals_block_pending_ecoevents'] = algo_result['param']['num_intervals_block_pending_ecoevents']
2319
+ order['num_intervals_current_ecoevents'] = algo_result['param']['num_intervals_current_ecoevents']
2320
+ order['enable_hi_timeframe_confirm'] = algo_result['param']['enable_hi_timeframe_confirm'] if 'enable_hi_timeframe_confirm' in algo_result['param'] else None
2321
+ order['sl_num_intervals_delay'] = algo_result['param']['sl_num_intervals_delay']
2322
+ order['sl_hard_percent'] = algo_result['param']['sl_hard_percent']
2323
+ order['sl_percent_trailing'] = algo_result['param']['sl_percent_trailing']
2324
+ order['use_gradual_tightened_trailing_stops'] = algo_result['param']['use_gradual_tightened_trailing_stops']
2325
+ order['tp_min_percent'] = algo_result['param']['tp_min_percent']
2326
+ order['tp_max_percent'] = algo_result['param']['tp_max_percent']
2327
+ order['asymmetric_tp_bps'] = algo_result['param']['asymmetric_tp_bps']
2328
+
2329
+ order['hi_candle_size'] = algo_result['param']['hi_candle_size']
2330
+ order['hi_stats_computed_over_how_many_candles'] = algo_result['param']['hi_stats_computed_over_how_many_candles']
2331
+ order['hi_how_many_candles'] = algo_result['param']['hi_how_many_candles']
2332
+ order['hi_ma_short_interval'] = algo_result['param']['hi_ma_short_interval']
2333
+ order['hi_ma_long_interval'] = algo_result['param']['hi_ma_long_interval']
2334
+
2335
+ order['lo_candle_size'] = algo_result['param']['lo_candle_size']
2336
+ order['lo_stats_computed_over_how_many_candles'] = algo_result['param']['lo_stats_computed_over_how_many_candles']
2337
+ order['lo_how_many_candles'] = algo_result['param']['lo_how_many_candles']
2338
+ order['lo_ma_short_interval'] = algo_result['param']['lo_ma_short_interval']
2339
+ order['lo_ma_long_interval'] = algo_result['param']['lo_ma_long_interval']
2340
+
2341
+ order['target_fib_level'] = algo_result['param']['target_fib_level']
2342
+ order['rsi_sliding_window_how_many_candles'] = algo_result['param']['rsi_sliding_window_how_many_candles']
2343
+ order['rsi_trend_sliding_window_how_many_candles'] = algo_result['param']['rsi_trend_sliding_window_how_many_candles']
2344
+ order['hurst_exp_window_how_many_candles'] = algo_result['param']['hurst_exp_window_how_many_candles']
2345
+
2346
+ order['ref_ema_num_days_fast'] = algo_result['param']['ref_ema_num_days_fast']
2347
+ order['ref_ema_num_days_slow'] = algo_result['param']['ref_ema_num_days_slow']
2348
+ order['long_above_ref_ema_short_below'] = algo_result['param']['long_above_ref_ema_short_below']
2349
+ order['ref_price_vs_ema_percent_threshold'] = algo_result['param']['ref_price_vs_ema_percent_threshold'] if 'ref_price_vs_ema_percent_threshold' in algo_result['param'] else None
2350
+ order['rsi_upper_threshold'] = algo_result['param']['rsi_upper_threshold']
2351
+ order['rsi_lower_threshold'] = algo_result['param']['rsi_lower_threshold']
2352
+
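+ # Stamp a unique id plus a calendar breakdown (year/month/day, day-of-week, week-of-month) for downstream grouping.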
2353
+ order['id'] = str(uuid.uuid4())
2354
+ order['trade_year'] = order['trade_datetime'].year
2355
+ order['trade_month'] = order['trade_datetime'].month
2356
+ order['trade_day'] = order['trade_datetime'].day
2357
+ order['trade_dayofweek'] = order['dayofweek']
2358
+ order['trade_week_of_month'] = timestamp_to_week_of_month(
2359
+ int(order['trade_datetime'].timestamp() * 1000)
2360
+ )
2361
+
2362
+ flattened_trades.append(order)
2363
+
2364
+ except Exception as error:
2365
+ logger.error(f"Error while processing flattenned trades! {error}")
2366
+
2367
+ if len(flattened_trades)>0:
2368
+ pd_flattened_trades = pd.DataFrame(flattened_trades)
2369
+ pd_flattened_trades.to_csv(filename)
2370
+
2371
+ logger.info(f"Trade extract: {filename}")