siglab-py 0.1.30__py3-none-any.whl → 0.6.33__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (45)
  1. siglab_py/algo/__init__.py +0 -0
  2. siglab_py/algo/macdrsi_crosses_15m_tc_strategy.py +107 -0
  3. siglab_py/algo/strategy_base.py +122 -0
  4. siglab_py/algo/strategy_executor.py +1308 -0
  5. siglab_py/algo/tp_algo.py +529 -0
  6. siglab_py/backtests/__init__.py +0 -0
  7. siglab_py/backtests/backtest_core.py +2405 -0
  8. siglab_py/backtests/coinflip_15m_crypto.py +432 -0
  9. siglab_py/backtests/fibonacci_d_mv_crypto.py +541 -0
  10. siglab_py/backtests/macdrsi_crosses_15m_tc_crypto.py +473 -0
  11. siglab_py/constants.py +26 -1
  12. siglab_py/exchanges/binance.py +38 -0
  13. siglab_py/exchanges/deribit.py +83 -0
  14. siglab_py/exchanges/futubull.py +12 -2
  15. siglab_py/market_data_providers/candles_provider.py +11 -10
  16. siglab_py/market_data_providers/candles_ta_provider.py +5 -5
  17. siglab_py/market_data_providers/ccxt_candles_ta_to_csv.py +4 -4
  18. siglab_py/market_data_providers/futu_candles_ta_to_csv.py +7 -2
  19. siglab_py/market_data_providers/google_monitor.py +320 -0
  20. siglab_py/market_data_providers/orderbooks_provider.py +15 -12
  21. siglab_py/market_data_providers/tg_monitor.py +428 -0
  22. siglab_py/market_data_providers/{test_provider.py → trigger_provider.py} +9 -8
  23. siglab_py/ordergateway/client.py +172 -41
  24. siglab_py/ordergateway/encrypt_keys_util.py +1 -1
  25. siglab_py/ordergateway/gateway.py +456 -347
  26. siglab_py/ordergateway/test_ordergateway.py +8 -7
  27. siglab_py/tests/integration/market_data_util_tests.py +75 -2
  28. siglab_py/tests/unit/analytic_util_tests.py +47 -12
  29. siglab_py/tests/unit/market_data_util_tests.py +45 -1
  30. siglab_py/tests/unit/simple_math_tests.py +252 -0
  31. siglab_py/tests/unit/trading_util_tests.py +65 -0
  32. siglab_py/util/analytic_util.py +476 -67
  33. siglab_py/util/datetime_util.py +39 -0
  34. siglab_py/util/market_data_util.py +528 -98
  35. siglab_py/util/module_util.py +40 -0
  36. siglab_py/util/notification_util.py +78 -0
  37. siglab_py/util/retry_util.py +16 -3
  38. siglab_py/util/simple_math.py +262 -0
  39. siglab_py/util/slack_notification_util.py +59 -0
  40. siglab_py/util/trading_util.py +118 -0
  41. {siglab_py-0.1.30.dist-info → siglab_py-0.6.33.dist-info}/METADATA +5 -9
  42. siglab_py-0.6.33.dist-info/RECORD +56 -0
  43. {siglab_py-0.1.30.dist-info → siglab_py-0.6.33.dist-info}/WHEEL +1 -1
  44. siglab_py-0.1.30.dist-info/RECORD +0 -34
  45. {siglab_py-0.1.30.dist-info → siglab_py-0.6.33.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,1308 @@
1
+ # type: ignore
2
+ import sys
3
+ import traceback
4
+ import os
5
+ import logging
6
+ from dotenv import load_dotenv
7
+ import argparse
8
+ import re
9
+ from datetime import datetime, timedelta, timezone
10
+ import time
11
+ import arrow
12
+ from zoneinfo import ZoneInfo
13
+ from typing import List, Dict, Any, Union, Callable
14
+ from io import StringIO
15
+ import json
16
+ import asyncio
17
+ from redis import StrictRedis
18
+ import pandas as pd
19
+ import numpy as np
20
+ import inspect
21
+ from tabulate import tabulate
22
+
23
+ from siglab_py.exchanges.any_exchange import AnyExchange
24
+ from siglab_py.ordergateway.client import DivisiblePosition, execute_positions
25
+ from siglab_py.util.datetime_util import parse_trading_window
26
+ from siglab_py.util.market_data_util import async_instantiate_exchange, interval_to_ms
27
+ from siglab_py.util.trading_util import calc_eff_trailing_sl
28
+ from siglab_py.util.notification_util import dispatch_notification
29
+ from siglab_py.util.aws_util import AwsKmsUtil
30
+
31
+ from siglab_py.constants import INVALID, JSON_SERIALIZABLE_TYPES, LogLevel, PositionStatus, OrderSide
32
+
33
+
34
+ '''
35
+ For dry runs/testing, swap back to StrategyBase; it will not fire an order.
36
+ '''
37
+ # from strategy_base import StrategyBase as TargetStrategy # Import whichever StrategyBase subclass you want to run here!
38
+ from macd_crosses_targets_from_level_15m_tc_strategy import MACDCrossesTargetFromLevel15mTCStrategy as TargetStrategy
39
+
40
+ current_filename = os.path.basename(__file__)
41
+
42
+ '''
43
+ Error: RuntimeError: aiodns needs a SelectorEventLoop on Windows.
44
+ Hack, by far the filthiest hack I've done in my career: Set SelectorEventLoop on Windows
45
+ '''
46
+ if sys.platform == 'win32':
47
+ asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())
48
+
49
+ '''
50
+ Usage:
51
+ Step 1. Start candles_providers
52
+ set PYTHONPATH=%PYTHONPATH%;D:\dev\siglab\siglab_py
53
+ python candles_provider.py --provider_id aaa --candle_size 1h --how_many_candles 720 --redis_ttl_ms 3600000
54
+ python candles_provider.py --provider_id bbb --candle_size 15m --how_many_candles 672 --redis_ttl_ms 3600000
55
+
56
+ Note: how_many_candles should be several times larger than compute_candles_stats.sliding_window_how_many_candles.
57
+ 720 = 24 x 30 (30 days of 1h candles)
58
+ 672 = 4 x 24 x 7 (Each hour has four 15m candles, so 672 candles cover 7 days)
59
+
60
+ Step 2. Start candles_ta_providers
61
+ set PYTHONPATH=%PYTHONPATH%;D:\dev\siglab\siglab_py
62
+ python candles_ta_provider.py --candle_size 1h --ma_long_intervals 48 --ma_short_intervals 12 --boillenger_std_multiples 2 --redis_ttl_ms 3600000 --processed_hash_queue_max_size 999 --pypy_compat N
63
+ python candles_ta_provider.py --candle_size 15m --ma_long_intervals 150 --ma_short_intervals 5 --boillenger_std_multiples 2 --redis_ttl_ms 3600000 --processed_hash_queue_max_size 999 --pypy_compat N
64
+
65
+ Note, for 15m bars, a sliding window of size 150 means 150 x 15m = 2250 minutes
66
+
67
+ Step 3. Start orderbooks_provider
68
+ set PYTHONPATH=%PYTHONPATH%;D:\dev\siglab\siglab_py
69
+ python orderbooks_provider.py --provider_id ccc --instance_capacity 25 --ts_delta_observation_ms_threshold 150 --ts_delta_consecutive_ms_threshold 150 --redis_ttl_ms 3600000
70
+
71
+ Step 4. To trigger candles_providers and orderbooks_provider
72
+ set PYTHONPATH=%PYTHONPATH%;D:\dev\siglab\siglab_py
73
+ python trigger_provider.py --provider_id aaa --tickers "okx_linear|SOL/USDT:USDT"
74
+ python trigger_provider.py --provider_id bbb --tickers "okx_linear|SOL/USDT:USDT"
75
+ python trigger_provider.py --provider_id ccc --tickers "okx_linear|SOL/USDT:USDT"
76
+
77
+ Step 5. Start strategy_executor
78
+ set PYTHONPATH=%PYTHONPATH%;D:\dev\siglab\siglab_py
79
+ python strategy_executor.py --gateway_id hyperliquid_01 --default_type linear --rate_limit_ms 100 --encrypt_decrypt_with_aws_kms Y --aws_kms_key_id xxx --apikey xxx --secret xxx --ticker SUSHI/USDC:USDC --order_type limit --amount_base_ccy 45 --residual_pos_usdt_threshold 10 --slices 3 --wait_fill_threshold_ms 15000 --leg_room_bps 5 --tp_min_percent 1.5 --tp_max_percent 2.5 --sl_percent_trailing 50 --sl_hard_percent 1 --reversal_num_intervals 3 --slack_info_url https://hooks.slack.com/services/xxx --slack_critial_url https://hooks.slack.com/services/xxx --slack_alert_url https://hooks.slack.com/services/xxx --economic_calendar_source xxx --block_entry_impacting_events Y --num_intervals_current_ecoevents 96 --trading_window_start Mon_00:00 --trading_window_end Fri_17:00
80
+
81
+ Step 6. Start order gateway
82
+ See the top of the script for instructions:
83
+ https://github.com/r0bbar/siglab/blob/master/siglab_py/ordergateway/gateway.py
84
+
85
+ Debug from VSCode, launch.json:
86
+ {
87
+ "version": "0.2.0",
88
+ "configurations": [
89
+ {
90
+ "name": "Python Debugger: Current File",
91
+ "type": "debugpy",
92
+ "request": "launch",
93
+ "program": "${file}",
94
+ "console": "integratedTerminal",
95
+ "args" : [
96
+ "--gateway_id", "hyperliquid_01",
97
+ "--default_type", "linear",
98
+ "--rate_limit_ms", "100",
99
+ "--encrypt_decrypt_with_aws_kms", "Y",
100
+ "--aws_kms_key_id", "xxx",
101
+ "--apikey", "xxx",
102
+ "--secret", "xxx",
103
+
104
+ "--ticker", "SOL/USDC:USDC",
105
+ "--order_type", "limit",
106
+ "--amount_base_ccy", "3",
107
+ "--slices", "3",
108
+ "--wait_fill_threshold_ms", "15000",
109
+ "--leg_room_bps", "5",
110
+ "--tp_min_percent", "3",
111
+ "--tp_max_percent", "5",
112
+ "--sl_percent_trailing", "35",
113
+ "--sl_hard_percent", "2.5",
114
+ "--reversal_num_intervals", "3",
115
+
116
+ "--economic_calendar_source", "xxx",
117
+ "--block_entry_impacting_events","Y",
118
+ "--num_intervals_current_ecoevents", "96",
119
+
120
+ "--trading_window_start", "Mon_00:00",
121
+ "--trading_window_end", "Fri_17:00",
122
+
123
+ "--slack_info_url", "https://hooks.slack.com/services/xxx",
124
+ "--slack_critial_url", "https://hooks.slack.com/services/xxx",
125
+ "--slack_alert_url", "https://hooks.slack.com/services/xxx",
126
+ ],
127
+ "env": {
128
+ "PYTHONPATH": "${workspaceFolder}"
129
+ }
130
+ }
131
+ ]
132
+ }
133
+ '''
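# Illustrative sketch (not part of this module): what Step 4's trigger_provider.py effectively does.
# It publishes the "exchange|ticker" assignments to a provider's assignment topic on Redis, the same
# pattern used by _trigger_producers() inside main() below. Topic and ticker here are placeholders.
def _illustrative_trigger_example():
    import json
    from redis import StrictRedis
    redis_client = StrictRedis(host='localhost', port=6379, db=0)
    redis_client.publish(channel='mds_assign_aaa', message=json.dumps(["okx_linear|SOL/USDT:USDT"]).encode('utf-8'))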
134
+ param : Dict = {
135
+ 'trailing_sl_min_percent_linear': 1.0, # Threshold used by tp_algo to decide between linear and non-linear stop tightening. If tp_max_percent is far away (>100 bps), there's more uncertainty whether the target can be reached: go with linear. (Illustrative sketch after this dict.)
136
+ 'non_linear_pow' : 5, # For non-linear trailing stops tightening.
137
+
138
+ 'rolldate_tz' : 'Asia/Hong_Kong', # Roll date based on what timezone?
139
+
140
+ # economic_calendar related
141
+ 'mapped_regions' : [ 'united_states' ],
142
+
143
+ 'mapped_event_codes' : [
144
+ 'core_inflation_rate_mom', 'core_inflation_rate_yoy',
145
+ 'inflation_rate_mom', 'inflation_rate_yoy',
146
+ 'fed_interest_rate_decision',
147
+ 'fed_chair_speech',
148
+ 'core_pce_price_index_mom',
149
+ 'core_pce_price_index_yoy',
150
+ 'unemployment_rate',
151
+ 'non_farm_payrolls',
152
+ 'gdp_growth_rate_qoq_adv',
153
+ 'gdp_growth_rate_qoq_final',
154
+ 'gdp_growth_rate_yoy',
155
+
156
+ 'manual_event'
157
+ ],
158
+ 'max_current_economic_calendar_age_sec' : 10,
159
+ 'num_intervals_current_ecoevents' : 4* 24, # x4 because lo_interval is "15m" per 'lo_candles_w_ta_topic': 4 x 24 fifteen-minute intervals = 24 hrs
160
+
161
+ "loop_freq_ms" : 5000, # reduce this if you need trade faster
162
+
163
+ 'current_filename' : current_filename,
164
+
165
+ 'notification' : {
166
+ 'footer' : None,
167
+
168
+ # slack webhook url's for notifications
169
+ 'slack' : {
170
+ 'info' : { 'webhook_url' : None },
171
+ 'critical' : { 'webhook_url' : None },
172
+ 'alert' : { 'webhook_url' : None },
173
+ }
174
+ },
175
+
176
+ 'mds' : {
177
+ 'topics' : {
178
+ "hi_candles_provider_topic" : "mds_assign_aaa",
179
+ "lo_candles_provider_topic" : "mds_assign_bbb",
180
+ "orderbooks_provider_topic" : "mds_assign_ccc",
181
+ "hi_candles_w_ta_topic" : "candles_ta-SOL-USDT-SWAP-okx-1h",
182
+ "lo_candles_w_ta_topic" : "candles_ta-SOL-USDT-SWAP-okx-15m",
183
+ "orderbook_topic" : "orderbooks_SOL/USDT:USDT_okx",
184
+
185
+ "full_economic_calendars_topic" : "economic_calendars_full_$SOURCE$",
186
+ },
187
+ 'redis' : {
188
+ 'host' : 'localhost',
189
+ 'port' : 6379,
190
+ 'db' : 0,
191
+ 'ttl_ms' : 1000*60*15 # 15 min?
192
+ }
193
+ }
194
+ }
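# Illustrative sketch referenced from 'trailing_sl_min_percent_linear' above. The actual curve is
# implemented in trading_util.calc_eff_trailing_sl (not shown in this diff); the helper below is
# only an assumption of how linear vs non-linear tightening might differ, not the package's formula.
def _illustrative_trailing_sl(pnl_percent: float, tp_min: float, tp_max: float, sl_percent_trailing: float, non_linear_pow: float, linear: bool) -> float:
    # Progress of the trade between the min and max take-profit thresholds, clipped to [0, 1].
    progress = max(0.0, min(1.0, (pnl_percent - tp_min) / (tp_max - tp_min)))
    # Linear: the trailing stop tightens proportionally as pnl approaches tp_max.
    # Non-linear (pow > 1): stays loose early, then tightens sharply near the target.
    factor = progress if linear else progress ** non_linear_pow
    return sl_percent_trailing * (1.0 - factor)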
195
+
196
+ logging.Formatter.converter = time.gmtime
197
+ logger = logging.getLogger()
198
+ log_level = logging.INFO # DEBUG --> INFO --> WARNING --> ERROR
199
+ logger.setLevel(log_level)
200
+ format_str = '%(asctime)s %(message)s'
201
+ formatter = logging.Formatter(format_str)
202
+ sh = logging.StreamHandler()
203
+ sh.setLevel(log_level)
204
+ sh.setFormatter(formatter)
205
+ logger.addHandler(sh)
206
+
207
+ POSITION_CACHE_FILE_NAME = f"{TargetStrategy.__name__}_position_cache_$GATEWAY_ID$.csv"
208
+ POSITION_CACHE_COLUMNS = [
209
+ 'exchange', 'ticker',
210
+ 'status',
211
+ 'pos', 'pos_usdt', 'multiplier', 'created', 'closed',
212
+ 'pos_entries',
213
+ 'spread_bps', 'entry_px', 'close_px',
214
+ 'ob_mid', 'ob_best_bid', 'ob_best_ask',
215
+ 'unreal_live',
216
+ 'max_unreal_live',
217
+ 'max_pain',
218
+ 'max_recovered_pnl',
219
+ 'pnl_live_bps',
220
+ 'pnl_open_bps',
221
+ 'max_unreal_live_bps',
222
+ 'max_unreal_open_bps',
223
+
224
+ 'tp_max_target',
225
+ 'tp_min_target',
226
+
227
+ 'running_sl_percent_hard',
228
+ 'sl_trailing_min_threshold_crossed',
229
+ 'sl_percent_trailing',
230
+ 'loss_trailing'
231
+ ]
232
+
233
+ ORDERHIST_CACHE_FILE_NAME = f"{TargetStrategy.__name__}_orderhist_cache_$GATEWAY_ID$.csv"
234
+ ORDERHIST_CACHE_COLUMNS = [ 'datetime', 'exchange', 'ticker', 'reason', 'side', 'avg_price', 'amount', 'pnl', 'pnl_bps', 'max_pain' ]
235
+
236
+ def log(message : str, log_level : LogLevel = LogLevel.INFO):
237
+ if log_level.value<LogLevel.WARNING.value:
238
+ logger.info(message)
239
+
240
+ elif log_level.value==LogLevel.WARNING.value:
241
+ logger.warning(message)
242
+
243
+ elif log_level.value==LogLevel.ERROR.value:
244
+ logger.error(message)
245
+
246
+ def parse_args():
247
+ parser = argparse.ArgumentParser()
248
+
249
+ parser.add_argument("--gateway_id", help="gateway_id: Where are you sending your order?", default=None)
250
+
251
+ parser.add_argument("--default_type", help="default_type: spot, linear, inverse, futures ...etc", default='linear')
252
+ parser.add_argument("--rate_limit_ms", help="rate_limit_ms: Check your exchange rules", default=100)
253
+
254
+ parser.add_argument("--trading_window_start", help="Start of trading window. Set as blank if trading not confined to particular trading window. Format: Fri_00:00", default='')
255
+ parser.add_argument("--trading_window_end", help="End of trading window.", default='')
256
+
257
+ parser.add_argument("--encrypt_decrypt_with_aws_kms", help="Y or N. If encrypt_decrypt_with_aws_kms=N, pass in apikey, secret and passphrase unencrypted (Not recommended, for testing only). If Y, they will be decrypted using AMS KMS key.", default='N')
258
+ parser.add_argument("--aws_kms_key_id", help="AWS KMS key ID", default=None)
259
+ parser.add_argument("--apikey", help="Exchange apikey", default=None)
260
+ parser.add_argument("--secret", help="Exchange secret", default=None)
261
+ parser.add_argument("--passphrase", help="Exchange passphrase", default=None)
262
+ parser.add_argument("--verbose", help="logging verbosity, Y or N (default).", default='N')
263
+
264
+ parser.add_argument("--ticker", help="Ticker you're trading. Example BTC/USDC:USDC", default=None)
265
+ parser.add_argument("--order_type", help="Order type: market or limit", default=None)
266
+ parser.add_argument("--amount_base_ccy", help="Order amount in base ccy (Not # contracts). Always positive, even for sell trades.", default=None)
267
+ parser.add_argument("--residual_pos_usdt_threshold", help="If pos_usdt<=residual_pos_usdt_threshold (in USD, default $100), PositionStatus will be marked to CLOSED.", default=100)
268
+ parser.add_argument("--leg_room_bps", help="Leg room, for Limit orders only. A more positive leg room is a more aggressive order to get filled. i.e. Buy at higher price, Sell at lower price.", default=5)
269
+ parser.add_argument("--slices", help="Algo can break down larger order into smaller slices. Default: 1", default=1)
270
+ parser.add_argument("--wait_fill_threshold_ms", help="Limit orders will be cancelled if not filled within this time. Remainder will be sent off as market order.", default=15000)
271
+
272
+ parser.add_argument("--tp_min_percent", help="For trailing stops. Min TP in percent, i.e. No TP until pnl at least this much.", default=None)
273
+ parser.add_argument("--tp_max_percent", help="For trailing stops. Max TP in percent, i.e. Price target", default=None)
274
+ parser.add_argument("--sl_percent_trailing", help="For trailing stops. trailing SL in percent, please refer to trading_util.calc_eff_trailing_sl for documentation.", default=None)
275
+ parser.add_argument("--default_effective_tp_trailing_percent", help="Default for sl_percent_trailing when pnl still below tp_min_percent. Default: float('inf'), meaing trailing stop mechanism will not be activated.", default=float('inf'))
276
+ parser.add_argument("--sl_adj_percent", help="Increment used in SL adj in percent.", default=0)
277
+ parser.add_argument("--sl_hard_percent", help="Hard stop in percent.", default=2)
278
+ parser.add_argument("--sl_num_intervals_delay", help="Number of intervals to wait before re-entry allowed after SL. Default 1", default=1)
279
+ parser.add_argument("--reversal_num_intervals", help="How many reversal candles to confirm reversal?", default=3)
280
+ parser.add_argument("--trailing_sl_min_percent_linear", help="This is threshold used for tp_algo to decide if use linear stops tightening, or non-linear. If tp_max_percent far (>200bps for example), there's more uncertainty if target can be reached: Go with linear. Default: 2% (200 bps)", default=2.0)
281
+ parser.add_argument("--non_linear_pow", help="For non-linear trailing stops tightening, have a look at call to 'calc_eff_trailing_sl'. Default: 5", default=5)
282
+ parser.add_argument("--recover_min_percent", help="This is minimum unreal pnl recovery when your trade is red before trailing stop mechanism will be activated: max_recovered_pnl_percent_notional>=recover_min_percent and abs(max_pain_percent_notional)>=recover_max_pain_percent. Default: float('inf'), meaing trailing stop won't be fired.", default=float('inf'))
283
+ parser.add_argument("--recover_max_pain_percent", help="This is minimum max_pain endured when your trade is red. For trailing stop mechanism will be activated: max_recovered_pnl_percent_notional>=recover_min_percent and abs(max_pain_percent_notional)>=recover_max_pain_percent. Default: float('inf'), meaing trailing stop mechanism will remain inactive.", default=float('inf'))
284
+
285
+ parser.add_argument("--economic_calendar_source", help="Source of economic calendar'. Default: None", default=None)
286
+ parser.add_argument("--num_intervals_current_ecoevents", help="Num intervals to block on incoming/outgoing economic events. For 15m bars for example, num_intervals_current_ecoevents=4*24 means 24 hours. Default: 0", default=0)
287
+ parser.add_argument("--block_entry_impacting_events", help="Block entries if any impacting economic events 'impacting_economic_calendars'. Default N", default='N')
288
+
289
+ parser.add_argument("--loop_freq_ms", help="Loop delays. Reduce this if you want to trade faster.", default=5000)
290
+
291
+ parser.add_argument("--slack_info_url", help="Slack webhook url for INFO", default=None)
292
+ parser.add_argument("--slack_critial_url", help="Slack webhook url for CRITICAL", default=None)
293
+ parser.add_argument("--slack_alert_url", help="Slack webhook url for ALERT", default=None)
294
+
295
+ args = parser.parse_args()
296
+
297
+ param['gateway_id'] = args.gateway_id
298
+ param['default_type'] = args.default_type
299
+ param['rate_limit_ms'] = int(args.rate_limit_ms)
300
+
301
+ param['trading_window_start'] = args.trading_window_start
302
+ param['trading_window_end'] = args.trading_window_end
303
+
304
+ if args.encrypt_decrypt_with_aws_kms:
305
+ if args.encrypt_decrypt_with_aws_kms=='Y':
306
+ param['encrypt_decrypt_with_aws_kms'] = True
307
+ else:
308
+ param['encrypt_decrypt_with_aws_kms'] = False
309
+ else:
310
+ param['encrypt_decrypt_with_aws_kms'] = False
311
+
312
+ param['aws_kms_key_id'] = args.aws_kms_key_id
313
+ param['apikey'] = args.apikey
314
+ param['secret'] = args.secret
315
+ param['passphrase'] = args.passphrase
316
+ if args.verbose:
317
+ if args.verbose=='Y':
318
+ param['verbose'] = True
319
+ else:
320
+ param['verbose'] = False
321
+ else:
322
+ param['verbose'] = False
323
+
324
+ param['ticker'] = args.ticker
325
+ param['order_type'] = args.order_type
326
+ param['amount_base_ccy'] = float(args.amount_base_ccy)
327
+ param['residual_pos_usdt_threshold'] = float(args.residual_pos_usdt_threshold)
328
+ param['leg_room_bps'] = int(args.leg_room_bps)
329
+ param['slices'] = int(args.slices)
330
+ param['wait_fill_threshold_ms'] = int(args.wait_fill_threshold_ms)
331
+
332
+ param['tp_min_percent'] = float(args.tp_min_percent)
333
+ param['tp_max_percent'] = float(args.tp_max_percent)
334
+ param['sl_percent_trailing'] = float(args.sl_percent_trailing)
335
+ param['default_effective_tp_trailing_percent'] = float(args.default_effective_tp_trailing_percent)
336
+ param['sl_adj_percent'] = float(args.sl_adj_percent)
337
+ param['sl_hard_percent'] = float(args.sl_hard_percent)
338
+ param['sl_num_intervals_delay'] = int(args.sl_num_intervals_delay)
339
+ param['reversal_num_intervals'] = int(args.reversal_num_intervals)
340
+ param['trailing_sl_min_percent_linear'] = float(args.trailing_sl_min_percent_linear)
341
+ param['non_linear_pow'] = float(args.non_linear_pow)
342
+ param['recover_min_percent'] = float(args.recover_min_percent)
343
+ param['recover_max_pain_percent'] = float(args.recover_max_pain_percent)
344
+
345
+ param['economic_calendar_source'] = args.economic_calendar_source
346
+
347
+ if args.block_entry_impacting_events:
348
+ if args.block_entry_impacting_events=='Y':
349
+ param['block_entry_impacting_events'] = True
350
+ else:
351
+ param['block_entry_impacting_events'] = False
352
+ else:
353
+ param['block_entry_impacting_events'] = False
354
+
355
+ param['loop_freq_ms'] = int(args.loop_freq_ms)
356
+
357
+ param['notification']['slack']['info']['webhook_url'] = args.slack_info_url
358
+ param['notification']['slack']['critical']['webhook_url'] = args.slack_critial_url
359
+ param['notification']['slack']['alert']['webhook_url'] = args.slack_alert_url
360
+
361
+ param['notification']['footer'] = f"From {param['current_filename']} {param['gateway_id']}"
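# Illustrative sketch only: how a positive --leg_room_bps makes a limit order more aggressive
# (buy above mid, sell below mid), per the help text above. The real pricing is done by the
# order gateway, not in this file; the helper below is an assumption for illustration.
def _illustrative_limit_px(mid: float, side: str, leg_room_bps: float) -> float:
    sign = 1 if side == 'buy' else -1
    return mid * (1 + sign * leg_room_bps / 10000)
# e.g. _illustrative_limit_px(100.0, 'buy', 5) -> 100.05, _illustrative_limit_px(100.0, 'sell', 5) -> 99.95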
362
+
363
+ def init_redis_client() -> StrictRedis:
364
+ redis_client : StrictRedis = StrictRedis(
365
+ host = param['mds']['redis']['host'],
366
+ port = param['mds']['redis']['port'],
367
+ db = 0,
368
+ ssl = False
369
+ )
370
+ try:
371
+ redis_client.keys()
372
+ except ConnectionError as redis_conn_error:
373
+ err_msg = f"Failed to connect to redis: {param['mds']['redis']['host']}, port: {param['mds']['redis']['port']}"
374
+ raise ConnectionError(err_msg)
375
+
376
+ return redis_client
377
+
378
+ def fetch_economic_events(redis_client, topic) -> List[Dict]:
379
+ restored = redis_client.get(topic)
380
+ if restored:
381
+ restored = json.loads(restored)
382
+ for economic_calendar in restored:
383
+ '''
384
+ Format:
385
+ 'calendar_id': '1234567'
386
+ 'category': 'Building Permits'
387
+ 'region': 'united_states'
388
+ 'event': 'Building Permits Prel'
389
+ 'event_code': 'building_permits_prel'
390
+ 'ccy': ''
391
+ 'importance': '3'
392
+ 'actual': 1.386
393
+ 'forecast': 1.45
394
+ 'previous': 1.44
395
+ 'pos_neg': 'bearish'
396
+ 'datetime': datetime.datetime(2024, 6, 20, 20, 30)
397
+ 'calendar_item_timestamp_ms': 1718886600000
398
+ 'calendar_item_timestamp_sec': 1718886600
399
+ 'source': 'xxx'
400
+ '''
401
+ economic_calendar['datetime'] = arrow.get(economic_calendar['datetime']).datetime
402
+ economic_calendar['datetime'] = economic_calendar['datetime'].replace(tzinfo=None)
403
+ return restored
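# Illustrative usage sketch (the "$SOURCE$" in the topic is resolved with a placeholder source name
# here; the same event_code/region filtering is applied later in main()):
def _illustrative_fetch_impacting_events(redis_client: StrictRedis) -> List[Dict]:
    events = fetch_economic_events(redis_client, "economic_calendars_full_mysource")
    if not events:
        return []
    return [e for e in events
            if e['event_code'] in param['mapped_event_codes']
            and e['region'] in param['mapped_regions']]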
404
+
405
+ async def main(
406
+ order_notional_adj_func : Callable[..., Dict[str, float]],
407
+ allow_entry_initial_func : Callable[..., Dict[str, bool]],
408
+ allow_entry_final_func : Callable[..., Dict[str, Union[bool, float, None]]],
409
+ sl_adj_func : Callable[..., Dict[str, float]],
410
+ trailing_stop_threshold_eval_func : Callable[..., Dict[str, float]],
411
+ tp_eval_func : Callable[..., bool]
412
+ ):
413
+ parse_args()
414
+
415
+ redis_client : StrictRedis = init_redis_client()
416
+
417
+ gateway_id : str = param['gateway_id']
418
+
419
+ fh = logging.FileHandler(f"strategy_executor_{param['gateway_id']}_{TargetStrategy.__name__}.log")
420
+ fh.setLevel(log_level)
421
+ fh.setFormatter(formatter)
422
+ logger.addHandler(fh)
423
+
424
+ exchange_name : str = gateway_id.split('_')[0]
425
+ ticker : str = param['ticker']
426
+ ordergateway_pending_orders_topic : str = 'ordergateway_pending_orders_$GATEWAY_ID$'
427
+ ordergateway_pending_orders_topic : str = ordergateway_pending_orders_topic.replace("$GATEWAY_ID$", gateway_id)
428
+
429
+ ordergateway_executions_topic : str = "ordergateway_executions_$GATEWAY_ID$"
430
+ ordergateway_executions_topic : str = ordergateway_executions_topic.replace("$GATEWAY_ID$", gateway_id)
431
+
432
+ hi_candles_w_ta_topic : str = param['mds']['topics']['hi_candles_w_ta_topic']
433
+ lo_candles_w_ta_topic : str = param['mds']['topics']['lo_candles_w_ta_topic']
434
+ orderbook_topic : str = param['mds']['topics']['orderbook_topic']
435
+
436
+ hi_candles_provider_topic : str = param['mds']['topics']['hi_candles_provider_topic']
437
+ lo_candles_provider_topic : str = param['mds']['topics']['lo_candles_provider_topic']
438
+ orderbooks_provider_topic : str = param['mds']['topics']['orderbooks_provider_topic']
439
+
440
+ # economic_calendar_source
441
+ full_economic_calendars_topic : str = param['mds']['topics']['full_economic_calendars_topic']
442
+ full_economic_calendars_topic = full_economic_calendars_topic.replace('$SOURCE$', param['economic_calendar_source']) if param['economic_calendar_source'] else None
443
+
444
+ log(f"hi_candles_w_ta_topic: {hi_candles_w_ta_topic}")
445
+ log(f"lo_candles_w_ta_topic: {lo_candles_w_ta_topic}")
446
+ log(f"hi_candles_provider_topic: {hi_candles_provider_topic}")
447
+ log(f"lo_candles_provider_topic: {lo_candles_provider_topic}")
448
+ log(f"orderbook_topic: {orderbook_topic}")
449
+ log(f"ordergateway_pending_orders_topic: {ordergateway_pending_orders_topic}")
450
+ log(f"ordergateway_executions_topic: {ordergateway_executions_topic}")
451
+ log(f"full_economic_calendars_topic: {full_economic_calendars_topic}")
452
+
453
+ # aliases
454
+ algo_param = param
455
+ strategic_specific_algo_param = TargetStrategy.get_strategy_algo_params()
456
+ for entry in strategic_specific_algo_param:
457
+ algo_param[entry['key']] = entry['val']
458
+
459
+ hi_candle_size : str = hi_candles_w_ta_topic.split('-')[-1]
460
+ lo_candle_size : str = lo_candles_w_ta_topic.split('-')[-1]
461
+ hi_interval = hi_candle_size[-1]
462
+ hi_num_intervals : int = int(hi_candle_size.replace(hi_interval,''))
463
+ hi_interval_ms : int = interval_to_ms(hi_interval) * hi_num_intervals
464
+ lo_interval = lo_candle_size[-1]
465
+ lo_num_intervals : int = int(lo_candle_size.replace(lo_interval,''))
466
+ lo_interval_ms : int = interval_to_ms(lo_interval) * lo_num_intervals
467
+
468
+ num_intervals_current_ecoevents_ms : int = lo_interval_ms * param['num_intervals_current_ecoevents']
469
+
470
+ strategy_indicators = TargetStrategy.get_strategy_indicators()
471
+ position_cache_columns = POSITION_CACHE_COLUMNS + strategy_indicators
472
+ pd_position_cache = pd.DataFrame(columns=position_cache_columns)
473
+
474
+ orderhist_cache = pd.DataFrame(columns=ORDERHIST_CACHE_COLUMNS)
475
+
476
+ notification_params : Dict[str, Any] = param['notification']
477
+
478
+ if not param['apikey']:
479
+ log("Loading credentials from .env")
480
+
481
+ load_dotenv()
482
+
483
+ encrypt_decrypt_with_aws_kms = os.getenv('ENCRYPT_DECRYPT_WITH_AWS_KMS')
484
+ encrypt_decrypt_with_aws_kms = True if encrypt_decrypt_with_aws_kms=='Y' else False
485
+
486
+ api_key : str = str(os.getenv('APIKEY'))
487
+ secret : str = str(os.getenv('SECRET'))
488
+ passphrase : str = str(os.getenv('PASSPHRASE'))
489
+ else:
490
+ log("Loading credentials from command line args")
491
+
492
+ encrypt_decrypt_with_aws_kms = param['encrypt_decrypt_with_aws_kms']
493
+ api_key : str = param['apikey']
494
+ secret : str = param['secret']
495
+ passphrase : str = param['passphrase']
496
+
497
+ if encrypt_decrypt_with_aws_kms:
498
+ aws_kms_key_id = str(os.getenv('AWS_KMS_KEY_ID'))
499
+
500
+ aws_kms = AwsKmsUtil(key_id=aws_kms_key_id, profile_name=None)
501
+ api_key = aws_kms.decrypt(api_key.encode())
502
+ secret = aws_kms.decrypt(secret.encode())
503
+ if passphrase:
504
+ passphrase = aws_kms.decrypt(passphrase.encode())
505
+
506
+ exchange : Union[AnyExchange, None] = await async_instantiate_exchange(
507
+ gateway_id=gateway_id,
508
+ api_key=api_key,
509
+ secret=secret,
510
+ passphrase=passphrase,
511
+ default_type=param['default_type'],
512
+ rate_limit_ms=param['rate_limit_ms'],
513
+ verbose=param['verbose']
514
+ )
515
+ if exchange:
516
+ markets = await exchange.load_markets()
517
+ market = markets[ticker]
518
+ multiplier = market['contractSize'] if 'contractSize' in market and market['contractSize'] else 1
519
+
520
+ balances = await exchange.fetch_balance()
521
+ log(f"Balances: {json.dumps(balances, indent=4)}")
522
+ dispatch_notification(title=f"{param['current_filename']} {param['gateway_id']} strategy {TargetStrategy.__name__} starting", message=balances['total'], footer=param['notification']['footer'], params=notification_params, log_level=LogLevel.CRITICAL, logger=logger)
523
+
524
+ # Lambdas preparation
525
+ order_notional_adj_func_sig = inspect.signature(order_notional_adj_func)
526
+ order_notional_adj_func_params = order_notional_adj_func_sig.parameters.keys()
527
+ allow_entry_initial_func_sig = inspect.signature(allow_entry_initial_func)
528
+ allow_entry_initial_func_params = allow_entry_initial_func_sig.parameters.keys()
529
+ allow_entry_final_func_sig = inspect.signature(allow_entry_final_func)
530
+ allow_entry_final_func_params = allow_entry_final_func_sig.parameters.keys()
531
+ sl_adj_func_sig = inspect.signature(sl_adj_func)
532
+ sl_adj_func_params = sl_adj_func_sig.parameters.keys()
533
+ trailing_stop_threshold_eval_func_sig = inspect.signature(trailing_stop_threshold_eval_func)
534
+ trailing_stop_threshold_eval_func_params = trailing_stop_threshold_eval_func_sig.parameters.keys()
535
+ tp_eval_func_sig = inspect.signature(tp_eval_func)
536
+ tp_eval_func_params = tp_eval_func_sig.parameters.keys()
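# Illustrative sketch of the callback pattern prepared above (hypothetical callback, not package
# code): each strategy callback declares only the locals it needs; its signature is inspected so
# that the matching subset of locals() can be passed in, exactly as done further below.
def _illustrative_callback_dispatch():
    def my_allow_entry_initial(pos_status, block_entries):
        return {'long': not block_entries, 'short': False}
    pos_status, block_entries = 'CLOSED', False
    wanted = inspect.signature(my_allow_entry_initial).parameters.keys()
    kwargs = {k: v for k, v in locals().items() if k in wanted}
    return my_allow_entry_initial(**kwargs)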
537
+
538
+ # Trigger candles providers
539
+ def _trigger_producers(
540
+ redis_client : StrictRedis,
541
+ exchange_tickers : List,
542
+ candles_partition_assign_topic : str):
543
+ # https://redis.io/commands/publish/
544
+ redis_client.publish(channel=candles_partition_assign_topic, message=json.dumps(exchange_tickers).encode('utf-8'))
545
+ _trigger_producers(redis_client, [ f"{exchange_name}|{param['ticker']}" ], hi_candles_provider_topic)
546
+ _trigger_producers(redis_client, [ f"{exchange_name}|{param['ticker']}" ], lo_candles_provider_topic)
547
+ _trigger_producers(redis_client, [ f"{exchange_name}|{param['ticker']}" ], orderbooks_provider_topic)
548
+
549
+ # Load cached positions from disk, if any
550
+ if os.path.exists(POSITION_CACHE_FILE_NAME.replace("$GATEWAY_ID$", gateway_id)) and os.path.getsize(POSITION_CACHE_FILE_NAME.replace("$GATEWAY_ID$", gateway_id))>0:
551
+ pd_position_cache = pd.read_csv(POSITION_CACHE_FILE_NAME.replace("$GATEWAY_ID$", gateway_id))
552
+ pd_position_cache.drop(pd_position_cache.columns[pd_position_cache.columns.str.contains('unnamed',case = False)],axis = 1, inplace = True)
553
+ pd_position_cache.replace([np.nan], [None], inplace=True)
554
+
555
+ pd_position_cache = pd_position_cache[POSITION_CACHE_COLUMNS]
556
+
557
+ if os.path.exists(ORDERHIST_CACHE_FILE_NAME.replace("$GATEWAY_ID$", gateway_id)) and os.path.getsize(ORDERHIST_CACHE_FILE_NAME.replace("$GATEWAY_ID$", gateway_id))>0:
558
+ orderhist_cache = pd.read_csv(ORDERHIST_CACHE_FILE_NAME.replace("$GATEWAY_ID$", gateway_id))
559
+ orderhist_cache.drop(orderhist_cache.columns[orderhist_cache.columns.str.contains('unnamed',case = False)],axis = 1, inplace = True)
560
+ orderhist_cache.replace([np.nan], [None], inplace=True)
561
+
562
+ if 'datetime' in orderhist_cache.columns:
563
+ orderhist_cache['datetime'] = pd.to_datetime(orderhist_cache['datetime'])
564
+
565
+ block_entries : bool = False
566
+ hi_row, hi_row_tm1 = None, None
567
+ lo_row, lo_row_tm1 = None, None
568
+ effective_tp_trailing_percent : float = param['default_effective_tp_trailing_percent']
569
+ this_ticker_open_trades : List[Dict] = []
570
+
571
+ reversal : bool = False
572
+ tp : bool = False
573
+ sl : bool = False
574
+ tp_min_percent : float = param['tp_min_percent'] # adjusted by trailing_stop_threshold_eval_func
575
+ tp_max_percent : float = param['tp_max_percent']
576
+ executed_position = None
577
+ position_break : bool = False
578
+ while (not tp and not sl and not position_break):
579
+ try:
580
+ dt_now = datetime.now()
581
+ block_entries = False
582
+
583
+ dt_targettz = datetime.fromtimestamp(dt_now.timestamp(), tz=ZoneInfo(param['rolldate_tz']))
584
+ today_dayofweek = dt_targettz.weekday()
585
+
586
+ delta_hour = int(
587
+ (dt_targettz.replace(tzinfo=None) - dt_now).total_seconds()/3600
588
+ )
589
+
590
+ log(f"rolldate_tz: {param['rolldate_tz']}, dt_now (local): {dt_now}, dt_targettz: {dt_targettz}, delta_hour: {delta_hour}")
591
+
592
+ if param['trading_window_start'] and param['trading_window_end']:
593
+ trading_window : Dict[str, str] = {
594
+ 'start' : param['trading_window_start'],
595
+ 'end' : param['trading_window_end']
596
+ }
597
+ parsed_trading_window = parse_trading_window(dt_targettz, trading_window)
598
+ if not parsed_trading_window['in_window']:
599
+ block_entries = True
600
+
601
+ log(f"trading_window start: {param['trading_window_start']}, end: {param['trading_window_end']}, in_window: {parsed_trading_window['in_window']}")
602
+ else:
603
+ log(f"No trading window specified")
604
+
605
+ if full_economic_calendars_topic:
606
+ full_economic_calendars = fetch_economic_events(redis_client, full_economic_calendars_topic)
607
+
608
+ impacting_economic_calendars = None
609
+ if full_economic_calendars:
610
+ impacting_economic_calendars = [ x for x in full_economic_calendars
611
+ if x['event_code'] in param['mapped_event_codes']
612
+ and x['region'] in param['mapped_regions']
613
+ ]
614
+ if impacting_economic_calendars:
615
+ impacting_economic_calendars = [ x for x in impacting_economic_calendars
616
+ if(
617
+ (x['calendar_item_timestamp_ms'] - datetime.now().timestamp()*1000)>0 # Incoming events
618
+ and (x['calendar_item_timestamp_ms'] - datetime.now().timestamp()*1000) < num_intervals_current_ecoevents_ms
619
+ ) or (
620
+ (x['calendar_item_timestamp_ms'] - datetime.now().timestamp()*1000)<0 # Past events
621
+ and (datetime.now().timestamp()*1000 - x['calendar_item_timestamp_ms']) < num_intervals_current_ecoevents_ms/3
622
+ )
623
+ ]
624
+ s_impacting_economic_calendars = " ".join([ x['event_code'] if x['event_code']!='manual_event' else x['event'] for x in impacting_economic_calendars ])
625
+
626
+ log(f"full_economic_calendars #rows: {len(full_economic_calendars) if full_economic_calendars else 0}")
627
+ log(f"impacting_economic_calendars #rows: {len(impacting_economic_calendars) if impacting_economic_calendars else 0} {s_impacting_economic_calendars}. block_entries: {block_entries}")
628
+
629
+ if param['block_entry_impacting_events'] and impacting_economic_calendars:
630
+ block_entries = True
631
+
632
+ log(f"block_entries: {block_entries}")
633
+
634
+ position_cache_row = pd_position_cache.loc[(pd_position_cache.exchange==exchange_name) & (pd_position_cache.ticker==ticker)]
635
+ if position_cache_row.shape[0]==0:
636
+ position_cache_row = {
637
+ 'exchange': exchange_name,
638
+ 'ticker' : ticker,
639
+
640
+ 'status' : PositionStatus.UNDEFINED.name,
641
+
642
+ 'pos' : None,
643
+ 'pos_usdt' : None,
644
+ 'multiplier' : multiplier,
645
+ 'created' : None,
646
+ 'closed' : None,
647
+
648
+ 'pos_entries' : [],
649
+
650
+ 'spread_bps' : None,
651
+ 'entry_px' : None,
652
+ 'close_px' : None,
653
+ 'last_interval_px' : None,
654
+
655
+ 'ob_mid' : None,
656
+ 'ob_best_bid' : None,
657
+ 'ob_best_ask' : None,
658
+
659
+ 'unreal_live' : 0,
660
+ 'max_unreal_live' : 0,
661
+ 'max_pain' : 0,
662
+ 'max_recovered_pnl' : 0,
663
+ 'pnl_live_bps' : 0,
664
+ 'pnl_open_bps' : 0,
665
+ 'max_unreal_live_bps' : 0,
666
+ 'max_unreal_open_bps' : 0,
667
+
668
+ 'tp_max_target' : None,
669
+ 'tp_min_target' : None,
670
+
671
+ 'running_sl_percent_hard' : param['sl_hard_percent'],
672
+ 'sl_trailing_min_threshold_crossed' : False,
673
+ 'sl_percent_trailing' : param['sl_percent_trailing'],
674
+ 'loss_trailing' : 0
675
+ }
676
+ position_cache_row.update({ind: None for ind in strategy_indicators})
677
+ pd_position_cache = pd.concat([pd_position_cache, pd.DataFrame([position_cache_row])], axis=0, ignore_index=True)
678
+ position_cache_row = pd_position_cache.loc[(pd_position_cache.exchange==exchange_name) & (pd_position_cache.ticker==ticker)]
679
+
680
+ position_cache_row = position_cache_row.iloc[0]
681
+
682
+ # Note: arrow.get will populate tzinfo
683
+ pos = position_cache_row['pos'] if position_cache_row['pos'] else 0
684
+ pos_usdt = position_cache_row['pos_usdt'] if position_cache_row['pos_usdt'] else 0
685
+ pos_status = position_cache_row['status']
686
+ if (pos==0 or pos_usdt<=param['residual_pos_usdt_threshold']) and pos_status==PositionStatus.OPEN.name:
687
+ pos_status = PositionStatus.CLOSED.name
688
+ pd_position_cache.loc[position_cache_row.name, 'status'] = pos_status
689
+ if pos_status!=PositionStatus.OPEN.name and (pos and pos!=0):
690
+ pos_status = PositionStatus.OPEN.name
691
+ pd_position_cache.loc[position_cache_row.name, 'status'] = pos_status
692
+
693
+ pos_created = position_cache_row['created']
694
+ pos_created = arrow.get(pos_created).datetime if pos_created and isinstance(pos_created, str) else pos_created
695
+ total_sec_since_pos_created = INVALID
696
+ if pos_created:
697
+ pos_created = pos_created.replace(tzinfo=None)
698
+ total_sec_since_pos_created = (dt_now - pos_created).total_seconds()
699
+ pos_closed = position_cache_row['closed']
700
+ pos_closed = arrow.get(pos_closed).datetime if pos_closed and isinstance(pos_closed, str) else pos_closed
701
+ if pos_closed:
702
+ pos_closed = pos_closed.replace(tzinfo=None)
703
+ pos_side = OrderSide.UNDEFINED
704
+ if pos_status!=PositionStatus.UNDEFINED.name:
705
+ pos_side = OrderSide.BUY if pos and pos>0 else OrderSide.SELL
706
+
707
+ pos_entries = position_cache_row['pos_entries']
708
+ if isinstance(pos_entries, str):
709
+ datetime_strings = re.findall(r'datetime\.datetime\(([^)]+)\)', pos_entries)
710
+
711
+ pos_entries = []
712
+ for dt_str in datetime_strings:
713
+ dt_parts = [int(part.strip()) for part in dt_str.split(',')]
714
+ if len(dt_parts) == 7:
715
+ pos_entries.append(datetime(*dt_parts))
716
+ elif len(dt_parts) == 6:
717
+ pos_entries.append(datetime(*dt_parts, microsecond=0))
718
+ num_pos_entries = len(pos_entries) if pos_entries else 0
719
+
720
+ unreal_live = position_cache_row['unreal_live']
721
+ max_unreal_live = position_cache_row['max_unreal_live']
722
+ max_pain = position_cache_row['max_pain']
723
+ max_recovered_pnl = position_cache_row['max_recovered_pnl']
724
+ pnl_live_bps = position_cache_row['pnl_live_bps']
725
+ pnl_open_bps = position_cache_row['pnl_open_bps']
726
+ max_unreal_live_bps = position_cache_row['max_unreal_live_bps']
727
+ max_unreal_open_bps = position_cache_row['max_unreal_open_bps']
728
+
729
+ tp_max_target = position_cache_row['tp_max_target']
730
+ tp_min_target = position_cache_row['tp_min_target']
731
+
732
+ running_sl_percent_hard = position_cache_row['running_sl_percent_hard']
733
+ sl_trailing_min_threshold_crossed = position_cache_row['sl_trailing_min_threshold_crossed']
734
+ sl_percent_trailing = position_cache_row['sl_percent_trailing']
735
+ loss_trailing = position_cache_row['loss_trailing']
736
+
737
+ max_unreal_live = position_cache_row['max_unreal_live'] if position_cache_row['max_unreal_live'] and not pd.isna(position_cache_row['max_unreal_live']) else 0
738
+ max_unreal_live_bps = position_cache_row['max_unreal_live_bps'] if position_cache_row['max_unreal_live_bps'] and not pd.isna(position_cache_row['max_unreal_live_bps']) else 0
739
+ max_unreal_open_bps = position_cache_row['max_unreal_open_bps'] if position_cache_row['max_unreal_open_bps'] and not pd.isna(position_cache_row['max_unreal_open_bps']) else 0
740
+ max_pain = position_cache_row['max_pain'] if position_cache_row['max_pain'] and not pd.isna(position_cache_row['max_pain']) else 0
741
+ max_recovered_pnl = position_cache_row['max_recovered_pnl'] if position_cache_row['max_recovered_pnl'] and not pd.isna(position_cache_row['max_recovered_pnl']) else 0
742
+
743
+ pnl_percent_notional = pnl_open_bps/100
744
+ max_pain_percent_notional = max_pain / pos_usdt * 100 if pos_usdt!=0 else 0
745
+ max_recovered_pnl_percent_notional = max_recovered_pnl / pos_usdt * 100 if pos_usdt!=0 else 0
746
+
747
+ '''
748
+ 'fetch_position' is for perpetual.
749
+ If you long, you'd see side = 'long'
750
+ If you short, you'd see side = 'short'
751
+ 'contracts' and 'notional' are always positive numbers.
752
+
753
+ Example for a short,
754
+ 'id': None
755
+ 'symbol': 'BTC/USDT:USDT'
756
+ 'timestamp': ...
757
+ 'datetime': '...'
758
+ 'lastUpdateTimestamp': ... (13 digits integer, in ms)
759
+ 'initialMargin': 91.77863881
760
+ 'initialMarginPercentage': 0.1005108176234939
761
+ 'maintenanceMargin': 5.11374881
762
+ 'maintenanceMarginPercentage': 0.005600290881174695
763
+ 'entryPrice': 95312.2
764
+ 'notional': 913.122
765
+ 'leverage': 10.0
766
+ 'unrealizedPnl': 0.86
767
+ 'realizedPnl': None
768
+ 'contracts': 0.01
769
+ 'contractSize': 1.0
770
+ 'marginRatio': None
771
+ 'liquidationPrice': None
772
+ 'markPrice': 91226.2
773
+ 'lastPrice': None
774
+ 'collateral': 0.0
775
+ 'marginMode': None
776
+ 'side': 'short'
777
+ 'percentage': None
778
+ 'stopLossPrice': None
779
+ 'takeProfitPrice': None
780
+ 'hedged': False
781
+
782
+ For spot/margin trading, you should use 'fetch_balance' instead. If you short you'd see:
783
+ BTC: { free: -5.2, total: -5.2 })
784
+ '''
785
+ position_from_exchange = await exchange.fetch_position(param['ticker'])
786
+
787
+ if exchange.options['defaultType']!='spot':
788
+ if executed_position and position_from_exchange:
789
+ position_from_exchange_num_contracts = position_from_exchange['contracts']
790
+ if position_from_exchange and position_from_exchange['side']=='short':
791
+ position_from_exchange_num_contracts = position_from_exchange_num_contracts *-1 if position_from_exchange_num_contracts>0 else position_from_exchange_num_contracts
792
+
793
+ position_from_exchange_base_ccy = position_from_exchange_num_contracts * multiplier
794
+
795
+ if position_from_exchange_base_ccy!=executed_position['position']['amount_base_ccy']:
796
+ position_break = True
797
+
798
+ err_msg = f"{param['ticker']}: Position break! expected: {executed_position['position']['amount_base_ccy']}, actual: {position_from_exchange_base_ccy}"
799
+ log(err_msg)
800
+ dispatch_notification(title=f"{param['current_filename']} {param['gateway_id']} Position break! {param['ticker']}", message=err_msg, footer=param['notification']['footer'], params=notification_params, log_level=LogLevel.CRITICAL, logger=logger)
801
+
802
+ if position_break:
803
+ log(f"Position break! Exiting execution. Did you manually close the trade?")
804
+ break
805
+
806
+ else:
807
+
808
+ hi_candles_valid, lo_candles_valid, orderbook_valid = False, False, False
809
+ trailing_candles = []
810
+
811
+ keys = [item.decode('utf-8') for item in redis_client.keys()]
812
+ if hi_candles_w_ta_topic in keys:
813
+ message = redis_client.get(hi_candles_w_ta_topic)
814
+ if message:
815
+ message = message.decode('utf-8')
816
+ hi_candles_w_ta = json.loads(message) if message else None
817
+ pd_hi_candles_w_ta = pd.read_json(StringIO(hi_candles_w_ta))
818
+ pd_hi_candles_w_ta['timestamp_ms'] = pd_hi_candles_w_ta['timestamp_ms'].astype('int64') // 1_000_000
819
+ hi_row = pd_hi_candles_w_ta.iloc[-1]
820
+ hi_row_tm1 = pd_hi_candles_w_ta.iloc[-2]
821
+ candles_age = dt_now.timestamp() *1000 - hi_row['timestamp_ms']
822
+ if candles_age < hi_interval_ms:
823
+ hi_candles_valid = True
824
+ else:
825
+ hi_candles_valid = False
826
+ err_msg = {
827
+ 'current_ts_ms' : dt_now.timestamp(),
828
+ 'hi_row_timestamp_ms' : hi_row['timestamp_ms'],
829
+ 'candles_age' : candles_age,
830
+ 'hi_interval_ms' : hi_interval_ms
831
+ }
832
+ log(err_msg, LogLevel.CRITICAL)
833
+ dispatch_notification(title=f"{param['current_filename']} {param['gateway_id']} Invalid hi_candles", message=err_msg, footer=param['notification']['footer'], params=notification_params, log_level=LogLevel.CRITICAL, logger=logger)
834
+ else:
835
+ hi_candles_valid = False
836
+ err_msg = f"hi candles missing, topic: {hi_candles_w_ta_topic}"
837
+ log(err_msg, LogLevel.CRITICAL)
838
+ dispatch_notification(title=f"{param['current_filename']} {param['gateway_id']} Invalid hi_candles", message=err_msg, footer=param['notification']['footer'], params=notification_params, log_level=LogLevel.CRITICAL, logger=logger)
839
+
840
+ if lo_candles_w_ta_topic in keys:
841
+ message = redis_client.get(lo_candles_w_ta_topic)
842
+ if message:
843
+ message = message.decode('utf-8')
844
+ lo_candles_w_ta = json.loads(message) if message else None
845
+ pd_lo_candles_w_ta = pd.read_json(StringIO(lo_candles_w_ta))
846
+ pd_lo_candles_w_ta['timestamp_ms'] = pd_lo_candles_w_ta['timestamp_ms'].astype('int64') // 1_000_000
847
+ lo_row = pd_lo_candles_w_ta.iloc[-1]
848
+ lo_row_tm1 = pd_lo_candles_w_ta.iloc[-2]
849
+ candles_age = dt_now.timestamp() *1000 - lo_row['timestamp_ms']
850
+ if candles_age < lo_interval_ms:
851
+ lo_candles_valid = True
852
+
853
+ trailing_candles = pd_lo_candles_w_ta \
854
+ .tail(param['reversal_num_intervals']) \
855
+ .values.tolist()
856
+
857
+ trailing_candles = [dict(zip(pd_lo_candles_w_ta.columns, row)) for row in trailing_candles]
858
+
859
+ else:
860
+ lo_candles_valid = False
861
+ err_msg = {
862
+ 'current_ts_ms' : dt_now.timestamp(),
863
+ 'lo_row_timestamp_ms' : lo_row['timestamp_ms'],
864
+ 'candles_age' : candles_age,
865
+ 'lo_interval_ms' : lo_interval_ms
866
+ }
867
+ log(err_msg, LogLevel.CRITICAL)
868
+ dispatch_notification(title=f"{param['current_filename']} {param['gateway_id']} Invalid lo_candles", message=err_msg, footer=param['notification']['footer'], params=notification_params, log_level=LogLevel.CRITICAL, logger=logger)
869
+ else:
870
+ lo_candles_valid = False
871
+ err_msg = f"lo candles missing, topic: {lo_candles_w_ta_topic}"
872
+ log(err_msg, LogLevel.CRITICAL)
873
+ dispatch_notification(title=f"{param['current_filename']} {param['gateway_id']} Invalid hi_candles", message=err_msg, footer=param['notification']['footer'], params=notification_params, log_level=LogLevel.CRITICAL, logger=logger)
874
+
875
+ if orderbook_topic in keys:
876
+ message = redis_client.get(orderbook_topic)
877
+ if message:
878
+ message = message.decode('utf-8')
879
+ ob = json.loads(message) if message else None
880
+ orderbook_valid = ob['is_valid']
881
+
882
+ else:
883
+ orderbook_valid = False
884
+
885
+ if not orderbook_valid:
886
+ ob = await exchange.fetch_order_book(symbol=param['ticker'], limit=10)
887
+ err_msg = f"orderbook missing, topic: {orderbook_topic}, fetch from REST instead"
888
+ log(err_msg, LogLevel.WARNING)
889
+
890
+ if hi_candles_valid and lo_candles_valid: # On turn of interval, candles_provider may need a little time to publish latest candles
891
+
892
+ # Strategies use different indicators, hence: TargetStrategy.get_strategy_indicators()
893
+ for indicator in strategy_indicators:
894
+ indicator_source : str = indicator.split(":")[0]
895
+ indicator_name = indicator.split(":")[-1]
896
+ if indicator_source=="lo_row":
897
+ indicator_value = lo_row[indicator_name]
898
+ elif indicator_source=="lo_row_tm1":
899
+ indicator_value = lo_row_tm1[indicator_name]
900
+ elif indicator_source=="hi_row":
901
+ indicator_value = hi_row[indicator_name]
902
+ elif indicator_source=="hi_row_tm1":
903
+ indicator_value = hi_row_tm1[indicator_name]
904
+ pd_position_cache.loc[position_cache_row.name, indicator] = indicator_value
905
+
906
+ best_ask = min([x[0] for x in ob['asks']])
907
+ best_bid = max([x[0] for x in ob['bids']])
908
+ mid = (best_ask+best_bid)/2
909
+ spread_bps = (best_ask/best_bid - 1) * 10000
910
+
911
+ last_candles=trailing_candles # alias
912
+
913
+ if pos_status!=PositionStatus.UNDEFINED.name:
914
+ pos_usdt = mid * pos
915
+ pd_position_cache.loc[position_cache_row.name, 'pos_usdt'] = pos_usdt
916
+
917
+ if pos_side == OrderSide.BUY:
918
+ unreal_live = (mid - pos_entry_px) * param['amount_base_ccy']
919
+ unrealized_pnl_optimistic = (trailing_candles[-1][2] - pos_entry_px) * param['amount_base_ccy'] # trailing_candles[-1][2]: high
920
+ unrealized_pnl_pessimistic = (trailing_candles[-1][3] - pos_entry_px) * param['amount_base_ccy'] # trailing_candles[-1][3]: low
921
+ unrealized_pnl_open = unreal_live
922
+ if total_sec_since_pos_created > (lo_interval_ms/1000):
923
+ '''
924
+ "unrealized_pnl_open": To align with backtests, motivation is to avoid spikes and trigger trailing stops too early.
925
+ But we need be careful with tall candles immediately upon entries.
926
+ trailing_candles[-1] is latest candle
927
+ trailing_candles[-1][1] is 'open' from latest candle
928
+ Example long BTC, a mean reversion trade
929
+ pos_entry_px $97,000
930
+ open $99,000 (This is trailing_candles[-1][1], so it's big red candle)
931
+ mid $97,200 (Seconds after entry)
932
+
933
+ unreal_live $200 per BTC
934
+ unrealized_pnl_open $2000 per BTC (This is very misleading! This would cause algo to TP prematurely!)
935
+ Thus for new entries,
936
+ unrealized_pnl_open = unreal_live
937
+ '''
938
+ unrealized_pnl_open = (trailing_candles[-1][1] - pos_entry_px) * param['amount_base_ccy']
939
+ elif pos_side == OrderSide.SELL:
940
+ unreal_live = (pos_entry_px - mid) * param['amount_base_ccy']
941
+ unrealized_pnl_optimistic = (pos_entry_px - trailing_candles[-1][3]) * param['amount_base_ccy'] # trailing_candles[-1][3]: low
942
+ unrealized_pnl_pessimistic = (pos_entry_px - trailing_candles[-1][2]) * param['amount_base_ccy'] # trailing_candles[-1][2]: high
943
+ unrealized_pnl_open = unreal_live
944
+ if total_sec_since_pos_created > lo_interval_ms/1000:
945
+ unrealized_pnl_open = (pos_entry_px - trailing_candles[-1][1]) * param['amount_base_ccy']
946
+
947
+ kwargs = {k: v for k, v in locals().items() if k in trailing_stop_threshold_eval_func_params}
948
+ trailing_stop_threshold_eval_func_result = trailing_stop_threshold_eval_func(**kwargs)
949
+ tp_min_percent = trailing_stop_threshold_eval_func_result['tp_min_percent']
950
+ tp_max_percent = trailing_stop_threshold_eval_func_result['tp_max_percent']
951
+
952
+ pnl_live_bps = unreal_live / abs(pos_usdt) * 10000 if pos_usdt else 0
953
+ pnl_open_bps = unrealized_pnl_open / abs(pos_usdt) * 10000 if pos_usdt else 0
954
+ pnl_percent_notional = pnl_open_bps/100
955
+
956
+ if unreal_live>max_unreal_live:
957
+ max_unreal_live = unreal_live
958
+
959
+ if pnl_live_bps>max_unreal_live_bps:
960
+ max_unreal_live_bps = pnl_live_bps
961
+
962
+ if pnl_open_bps>max_unreal_open_bps:
963
+ max_unreal_open_bps = pnl_open_bps
964
+
965
+ if unrealized_pnl_pessimistic < max_pain:
966
+ max_pain = unrealized_pnl_pessimistic
967
+
968
+ if unrealized_pnl_optimistic < 0 and unrealized_pnl_optimistic>max_pain:
969
+ recovered_pnl = unrealized_pnl_optimistic - max_pain
970
+ if recovered_pnl > max_recovered_pnl:
971
+ max_recovered_pnl = recovered_pnl
972
+
973
+ max_pain_percent_notional = max_pain / pos_usdt * 100
974
+ max_recovered_pnl_percent_notional = max_recovered_pnl / pos_usdt * 100
975
+
976
+ loss_trailing = (1 - pnl_live_bps/max_unreal_live_bps) * 100 if pnl_live_bps>0 else 0
977
+
978
+ pd_position_cache.loc[position_cache_row.name, 'unreal_live'] = unreal_live
979
+ pd_position_cache.loc[position_cache_row.name, 'max_unreal_live'] = max_unreal_live
980
+ pd_position_cache.loc[position_cache_row.name, 'max_pain'] = max_pain
981
+ pd_position_cache.loc[position_cache_row.name, 'max_recovered_pnl'] = max_recovered_pnl
982
+ pd_position_cache.loc[position_cache_row.name, 'pnl_live_bps'] = pnl_live_bps
983
+ pd_position_cache.loc[position_cache_row.name, 'pnl_open_bps'] = pnl_open_bps
984
+ pd_position_cache.loc[position_cache_row.name, 'max_unreal_live_bps'] = max_unreal_live_bps
985
+ pd_position_cache.loc[position_cache_row.name, 'max_unreal_open_bps'] = max_unreal_open_bps
986
+
987
+ pd_position_cache.loc[position_cache_row.name, 'spread_bps'] = spread_bps
988
+ pd_position_cache.loc[position_cache_row.name, 'ob_mid'] = mid
989
+ pd_position_cache.loc[position_cache_row.name, 'ob_best_bid'] = best_bid
990
+ pd_position_cache.loc[position_cache_row.name, 'ob_best_ask'] = best_ask
991
+
992
+ kwargs = {k: v for k, v in locals().items() if k in allow_entry_initial_func_params}
993
+ allow_entry_func_initial_result = allow_entry_initial_func(**kwargs)
994
+ allow_entry_long = allow_entry_func_initial_result['long']
995
+ allow_entry_short = allow_entry_func_initial_result['short']
996
+
997
+ allow_entry = allow_entry_long or allow_entry_short
998
+ allow_entry = allow_entry and pos_status!=PositionStatus.OPEN.name
999
+ if allow_entry and not block_entries:
1000
+ kwargs = {k: v for k, v in locals().items() if k in allow_entry_final_func_params}
1001
+ allow_entry_func_final_result = allow_entry_final_func(**kwargs)
1002
+ allow_entry_final_long = allow_entry_func_final_result['long']
1003
+ allow_entry_final_short = allow_entry_func_final_result['short']
1004
+ target_price_long = allow_entry_func_final_result['target_price_long']
1005
+ target_price_short = allow_entry_func_final_result['target_price_short']
1006
+
1007
+ if allow_entry_final_long or allow_entry_final_short:
1008
+ if allow_entry_final_long:
1009
+ side = 'buy'
1010
+ pnl_potential_bps = (target_price_long/mid - 1) *10000 if target_price_long else None
1011
+ elif allow_entry_final_short:
1012
+ side = 'sell'
1013
+ pnl_potential_bps = (mid/target_price_short - 1) *10000 if target_price_short else None
1014
+ else:
1015
+ raise ValueError("Either allow_long or allow_short!")
1016
+
1017
+ # tp_min_percent adj: for strategies where target_price is not based on tp_max_percent but is variable
1018
+ if pnl_potential_bps<tp_max_percent:
1019
+ tp_minmax_ratio = tp_min_percent/tp_max_percent
1020
+ tp_max_percent = pnl_potential_bps
1021
+ tp_min_percent = tp_minmax_ratio * tp_max_percent
1022
+
1023
+ kwargs = {k: v for k, v in locals().items() if k in order_notional_adj_func_params}
1024
+ order_notional_adj_func_result = order_notional_adj_func(**kwargs)
1025
+ target_order_notional = order_notional_adj_func_result['target_order_notional']
1026
+
1027
+ entry_positions : List[DivisiblePosition] = [
1028
+ DivisiblePosition(
1029
+ ticker = param['ticker'],
1030
+ side = side,
1031
+ amount = target_order_notional,
1032
+ leg_room_bps = param['leg_room_bps'],
1033
+ order_type = param['order_type'],
1034
+ slices = param['slices'],
1035
+ wait_fill_threshold_ms = param['wait_fill_threshold_ms']
1036
+ )
1037
+ ]
1038
+ log(f"dispatching {side} orders to {gateway_id}")
1039
+ executed_positions : Union[Dict[JSON_SERIALIZABLE_TYPES, JSON_SERIALIZABLE_TYPES], None] = execute_positions(
1040
+ redis_client=redis_client,
1041
+ positions=entry_positions,
1042
+ ordergateway_pending_orders_topic=ordergateway_pending_orders_topic,
1043
+ ordergateway_executions_topic=ordergateway_executions_topic
1044
+ )
1045
+ for executed_position in executed_positions:
1046
+ if not executed_position['done']:
1047
+ err_msg = executed_position['execution_err']
1048
+ log(err_msg, log_level=LogLevel.ERROR)
1049
+ dispatch_notification(title=f"singlelegta error from order gateway {gateway_id}!!", message=err_msg, log_level=logging.ERROR)
1050
+ raise ValueError(err_msg)
1051
+ executed_position = executed_positions[0] # We sent only one DivisiblePosition.
1052
+
1053
+ new_pos_from_exchange =executed_position['filled_amount']
1054
+ amount_filled_usdt = mid * new_pos_from_exchange
1055
+ pos_entry_px = executed_position['average_cost']
1056
+ new_pos_usdt_from_exchange = new_pos_from_exchange * executed_position['average_cost']
1057
+ fees = executed_position['fees']
1058
+
1059
+ if side=='buy':
1060
+ tp_max_price = mid * (1 + pnl_potential_bps/10000)
1061
+ tp_min_price = mid * (1 + tp_min_percent/10000)
1062
+ sl_price = mid * (1 - param['sl_hard_percent']/100)
1063
+
1064
+ elif side=='sell':
1065
+ tp_max_price = mid * (1 - pnl_potential_bps/10000)
1066
+ tp_min_price = mid * (1 - tp_min_percent/10000)
1067
+ sl_price = mid * (1 + param['sl_hard_percent']/100)
1068
+
+ executed_position['position'] = {
+ 'status' : 'open',
+ 'pos_entry_px' : pos_entry_px,
+ 'mid' : mid,
+ 'amount_base_ccy' : executed_position['filled_amount'],
+ 'tp_min_price' : tp_min_price,
+ 'tp_max_price' : tp_max_price,
+ 'sl_price' : sl_price,
+ 'multiplier' : multiplier
+ }
+
+ pd_position_cache.loc[position_cache_row.name, 'pos'] = pos + new_pos_from_exchange
+ pd_position_cache.loc[position_cache_row.name, 'pos_usdt'] = pos_usdt + new_pos_usdt_from_exchange
+ pd_position_cache.loc[position_cache_row.name, 'status'] = PositionStatus.OPEN.name
+ pos_created = datetime.fromtimestamp(time.time())
+ pd_position_cache.loc[position_cache_row.name, 'created'] = pos_created
+ pd_position_cache.loc[position_cache_row.name, 'closed'] = None
+ pd_position_cache.loc[position_cache_row.name, 'entry_px'] = pos_entry_px
+ pd_position_cache.loc[position_cache_row.name, 'close_px'] = None
+ pd_position_cache.loc[position_cache_row.name, 'unreal_live'] = None
+ pd_position_cache.loc[position_cache_row.name, 'max_unreal_live'] = 0
+ pd_position_cache.loc[position_cache_row.name, 'max_pain'] = 0
+ pd_position_cache.loc[position_cache_row.name, 'max_recovered_pnl'] = 0
+ pd_position_cache.loc[position_cache_row.name, 'pnl_live_bps'] = None
+ pd_position_cache.loc[position_cache_row.name, 'pnl_open_bps'] = None
+ pd_position_cache.loc[position_cache_row.name, 'max_unreal_live_bps'] = 0
+ pd_position_cache.loc[position_cache_row.name, 'max_unreal_open_bps'] = 0
+ pd_position_cache.loc[position_cache_row.name, 'tp_max_target'] = tp_max_price
+ pd_position_cache.loc[position_cache_row.name, 'tp_min_target'] = tp_min_price
+ pd_position_cache.loc[position_cache_row.name, 'running_sl_percent_hard'] = param['sl_hard_percent']
+ pd_position_cache.loc[position_cache_row.name, 'sl_trailing_min_threshold_crossed'] = False
+ pd_position_cache.loc[position_cache_row.name, 'sl_percent_trailing'] = float('inf')
+ pd_position_cache.loc[position_cache_row.name, 'loss_trailing'] = 0
+
+ pos_entries.append(pos_created)
+ pd_position_cache.at[position_cache_row.name, 'pos_entries'] = pos_entries
+
+ # This is for tp_eval_func
+ this_ticker_open_trades.append(
+ {
+ 'ticker' : param['ticker'],
+ 'side' : side,
+ 'amount' : target_order_notional,
+ 'tp_max_price' : tp_max_price,
+ 'target_price' : tp_max_price # This is the only field needed by backtest_core generic_tp_eval
+ }
+ )
+
+ orderhist_cache_row = {
+ 'datetime' : dt_now,
+ 'exchange' : exchange_name,
+ 'ticker' : ticker,
+ 'reason' : 'entry',
+ 'side' : side,
+ 'avg_price' : new_pos_usdt_from_exchange/new_pos_from_exchange,
+ 'amount': abs(new_pos_usdt_from_exchange),
+ 'pnl' : 0,
+ 'pnl_bps' : 0,
+ 'max_pain' : 0
+ }
+ orderhist_cache = pd.concat([orderhist_cache, pd.DataFrame([orderhist_cache_row])], axis=0, ignore_index=True)
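Appending with pd.concat on a one-row DataFrame, as above, is the current pandas idiom (DataFrame.append was removed in pandas 2.x). A standalone equivalent, with illustrative values:

import pandas as pd

orderhist_cache = pd.DataFrame(columns=['datetime', 'ticker', 'side', 'amount'])
row = {'datetime': '2024-01-01 00:00:00', 'ticker': 'BTC/USDT:USDT', 'side': 'buy', 'amount': 1000.0}
orderhist_cache = pd.concat([orderhist_cache, pd.DataFrame([row])], axis=0, ignore_index=True)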
+
+ dispatch_notification(title=f"{param['current_filename']} {gateway_id} Entry succeeded. {param['ticker']} {side} {param['amount_base_ccy']} (USD amount: {amount_filled_usdt}) @ {pos_entry_px}", message=executed_position['position'], footer=param['notification']['footer'], params=notification_params, log_level=LogLevel.CRITICAL, logger=logger)
+
+ '''
+ Have a look at this for a visual explanation of how "Gradually tightened stops" works:
+ https://github.com/r0bbar/siglab/blob/master/siglab_py/tests/manual/trading_util_tests.ipynb
+ '''
+ if (
+ (pnl_percent_notional>0 and pnl_percent_notional>=tp_min_percent) # pnl_percent_notional is evaluated using pnl_open_bps to avoid spikes
+ or (
+ pnl_percent_notional<0
+ and max_recovered_pnl_percent_notional>=param['recover_min_percent']
+ and abs(max_pain_percent_notional)>=param['recover_max_pain_percent']
+ ) # Taking 'abs': the trailing stop can fire whichever way the trade has moved, even if it is currently a losing trade.
+ ):
+ if not sl_trailing_min_threshold_crossed:
+ sl_trailing_min_threshold_crossed = True
+ pd_position_cache.loc[position_cache_row.name, 'sl_trailing_min_threshold_crossed'] = sl_trailing_min_threshold_crossed
+
+ msg = {
+ 'side' : pos_side.name,
+ 'mid' : mid,
+ 'pos_entry_px' : pos_entry_px,
+ 'pnl_open_bps' : pnl_open_bps,
+ 'tp_min_percent' : tp_min_percent
+ }
+ log(msg, LogLevel.CRITICAL)
+ dispatch_notification(title=f"{param['current_filename']} {param['gateway_id']} sl_trailing_min_threshold_crossed: True!", message=msg, footer=param['notification']['footer'], params=notification_params, log_level=LogLevel.CRITICAL, logger=logger)
+
+
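Restated as a standalone predicate for readability (names mirror the locals above; this is an editorial aid, not part of the module): the trailing stop arms either once open pnl clears the minimum take-profit threshold, or once a losing trade has recovered far enough off a deep enough drawdown.

def trailing_stop_activated(
    pnl_percent_notional: float,
    tp_min_percent: float,
    max_recovered_pnl_percent_notional: float,
    max_pain_percent_notional: float,
    recover_min_percent: float,
    recover_max_pain_percent: float,
) -> bool:
    winning_leg = pnl_percent_notional > 0 and pnl_percent_notional >= tp_min_percent
    recovery_leg = (
        pnl_percent_notional < 0
        and max_recovered_pnl_percent_notional >= recover_min_percent
        and abs(max_pain_percent_notional) >= recover_max_pain_percent
    )
    return winning_leg or recovery_leg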
+ _effective_tp_trailing_percent = calc_eff_trailing_sl(
+ tp_min_percent = tp_min_percent,
+ tp_max_percent = tp_max_percent,
+ sl_percent_trailing = tp_max_percent,
+ pnl_percent_notional = max_unreal_open_bps/100, # Note: Use [max]_unrealized_pnl_percent, not unrealized_pnl_percent!
+ default_effective_tp_trailing_percent = param['default_effective_tp_trailing_percent'],
+ linear=True if tp_max_percent >= param['trailing_sl_min_percent_linear'] else False, # If tp_max_percent is far away (e.g. >100 bps), there's more uncertainty whether the target can be reached: go linear.
+ pow=param['non_linear_pow']
+ )
+
+ # Once pnl passes tp_min_percent, trailing stops are activated, even if pnl later falls back below tp_min_percent.
+ effective_tp_trailing_percent = min(effective_tp_trailing_percent, _effective_tp_trailing_percent)
+
+ pd_position_cache.loc[position_cache_row.name, 'effective_tp_trailing_percent'] = effective_tp_trailing_percent
+
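calc_eff_trailing_sl lives in siglab_py.util.trading_util, and the notebook linked above visualizes it. The sketch below is only a rough illustration of the general idea (an allowed give-back that shrinks as max unrealized pnl approaches tp_max_percent, on either a linear or a power-curve schedule); it is not the library's actual formula, and it ignores edge cases such as tp_max_percent == tp_min_percent.

def effective_trailing_percent_sketch(
    max_unreal_percent: float,                 # best unrealized pnl so far, in percent
    tp_min_percent: float,
    tp_max_percent: float,
    default_effective_tp_trailing_percent: float,
    linear: bool = True,
    pow: float = 2.0,
) -> float:
    if max_unreal_percent <= tp_min_percent:
        return default_effective_tp_trailing_percent       # nothing tightens yet
    progress = min(1.0, (max_unreal_percent - tp_min_percent) / (tp_max_percent - tp_min_percent))
    if not linear:
        progress = progress ** pow                          # tighten slowly at first, faster near target
    return default_effective_tp_trailing_percent * (1 - progress)

# The executor then ratchets the stop so it only ever gets tighter over the life of the position,
# via min(effective_tp_trailing_percent, _effective_tp_trailing_percent).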
+
+ # STEP 2. Unwind position
+ if pos!=0:
+ tp = False
+ sl = False
+ if unreal_live>0:
+ kwargs = {k: v for k, v in locals().items() if k in tp_eval_func_params}
+ tp_final = tp_eval_func(**kwargs)
+
+ if tp_final:
+ tp = True
+ elif loss_trailing>=effective_tp_trailing_percent:
+ tp = True
+
+ else:
+ kwargs = {k: v for k, v in locals().items() if k in sl_adj_func_params}
+ sl_adj_func_result = sl_adj_func(**kwargs)
+ running_sl_percent_hard = sl_adj_func_result['running_sl_percent_hard']
+
+ if abs(pnl_live_bps/100)>=running_sl_percent_hard:
+ sl = True
+
+ if tp or sl:
+ exit_positions : List[DivisiblePosition] = [
+ DivisiblePosition(
+ ticker = param['ticker'],
+ side = 'sell' if pos_side==OrderSide.BUY else 'buy',
+ amount = param['amount_base_ccy'],
+ leg_room_bps = param['leg_room_bps'],
+ order_type = param['order_type'],
+ slices = param['slices'],
+ wait_fill_threshold_ms = param['wait_fill_threshold_ms'],
+
+ reduce_only=True
+ )
+ ]
+ log(f"Closing position. {ticker}, pos: {pos}, pos_usdt: {pos_usdt}")
+ executed_positions : Union[Dict[JSON_SERIALIZABLE_TYPES, JSON_SERIALIZABLE_TYPES], None] = execute_positions(
+ redis_client=redis_client,
+ positions=exit_positions,
+ ordergateway_pending_orders_topic=ordergateway_pending_orders_topic,
+ ordergateway_executions_topic=ordergateway_executions_topic
+ )
+ if executed_positions:
+ executed_position_close = executed_positions[0] # We sent only one DivisiblePosition.
+ if executed_position_close['done']:
+ if pos_side==OrderSide.BUY:
+ closed_pnl = (executed_position_close['average_cost'] - pos_entry_px) * param['amount_base_ccy']
+ else:
+ closed_pnl = (pos_entry_px - executed_position_close['average_cost']) * param['amount_base_ccy']
+
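Quick sanity check on the closed pnl arithmetic with illustrative numbers (long and short are mirror images):

amount_base_ccy = 0.5
pos_entry_px = 100.0
exit_avg_cost = 101.2

closed_pnl_long  = (exit_avg_cost - pos_entry_px) * amount_base_ccy   # ~ +0.60
closed_pnl_short = (pos_entry_px - exit_avg_cost) * amount_base_ccy   # ~ -0.60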
+ new_pos_from_exchange = abs(pos) + executed_position_close['filled_amount']
+ new_pos_usdt_from_exchange = new_pos_from_exchange * executed_position_close['average_cost']
+ fees = executed_position_close['fees']
+
+ executed_position_close['position'] = {
+ 'status' : 'TP' if tp else 'SL',
+ 'pnl_live_bps' : pnl_live_bps,
+ 'pos_entry_px' : pos_entry_px,
+ 'mid' : mid,
+ 'amount_base_ccy' : executed_position_close['filled_amount'],
+ 'closed_pnl' : closed_pnl,
+ }
+
+ new_status = PositionStatus.SL.name if closed_pnl<=0 else PositionStatus.CLOSED.name
+ pd_position_cache.loc[position_cache_row.name, 'pos'] = new_pos_from_exchange
+ pd_position_cache.loc[position_cache_row.name, 'pos_usdt'] = new_pos_usdt_from_exchange
+ pd_position_cache.loc[position_cache_row.name, 'status'] = new_status
+ pd_position_cache.loc[position_cache_row.name, 'closed'] = dt_now
+ pd_position_cache.loc[position_cache_row.name, 'close_px'] = mid # mid is approx of actual fill price!
+ pd_position_cache.loc[position_cache_row.name, 'unreal_live'] = None
+ pd_position_cache.loc[position_cache_row.name, 'max_unreal_live'] = 0
+ pd_position_cache.loc[position_cache_row.name, 'max_pain'] = 0
+ pd_position_cache.loc[position_cache_row.name, 'max_recovered_pnl'] = 0
+ pd_position_cache.loc[position_cache_row.name, 'pnl_live_bps'] = None
+ pd_position_cache.loc[position_cache_row.name, 'pnl_open_bps'] = None
+ pd_position_cache.loc[position_cache_row.name, 'max_unreal_live_bps'] = 0
+ pd_position_cache.loc[position_cache_row.name, 'max_unreal_open_bps'] = 0
+
+ pd_position_cache.at[position_cache_row.name, 'pos_entries'] = []
+ pd_position_cache.loc[position_cache_row.name, 'tp_max_target'] = None
+ pd_position_cache.loc[position_cache_row.name, 'tp_min_target'] = None
+ pd_position_cache.loc[position_cache_row.name, 'running_sl_percent_hard'] = param['sl_hard_percent']
+ pd_position_cache.loc[position_cache_row.name, 'sl_trailing_min_threshold_crossed'] = False
+ pd_position_cache.loc[position_cache_row.name, 'sl_percent_trailing'] = param['sl_percent_trailing']
+ pd_position_cache.loc[position_cache_row.name, 'loss_trailing'] = 0
+
+
+ # This is for tp_eval_func
+ this_ticker_open_trades.clear()
+
+ orderhist_cache_row = {
+ 'datetime' : dt_now,
+ 'exchange' : exchange_name,
+ 'ticker' : ticker,
+ 'reason' : new_status,
+ 'side' : 'sell' if pos_side==OrderSide.BUY else 'buy',
+ 'avg_price' : mid, # mid is actually not avg_price!
+ 'amount': abs(new_pos_usdt_from_exchange),
+ 'unreal_live' : unreal_live,
+ 'pnl_live_bps' : pnl_live_bps,
+ 'max_pain' : max_pain
+ }
+ orderhist_cache = pd.concat([orderhist_cache, pd.DataFrame([orderhist_cache_row])], axis=0, ignore_index=True)
+
+ dispatch_notification(title=f"{param['current_filename']} {param['gateway_id']} {'TP' if tp else 'SL'} succeeded. closed_pnl: {closed_pnl}", message=executed_position_close, footer=param['notification']['footer'], params=notification_params, log_level=LogLevel.CRITICAL, logger=logger)
+
+ else:
+ dispatch_notification(title=f"{param['current_filename']} {param['gateway_id']} Exit execution failed. {param['ticker']}", message=executed_position_close, footer=param['notification']['footer'], params=notification_params, log_level=LogLevel.CRITICAL, logger=logger)
+
+ log(f"[{gateway_id}]", log_level=LogLevel.INFO)
+ log(f"{tabulate(pd_position_cache, headers='keys', tablefmt='psql')}", log_level=LogLevel.INFO)
+
+ pd_position_cache.to_csv(POSITION_CACHE_FILE_NAME.replace("$GATEWAY_ID$", gateway_id))
+ orderhist_cache.to_csv(ORDERHIST_CACHE_FILE_NAME.replace("$GATEWAY_ID$", gateway_id))
+
+ except Exception as loop_err:
+ err_msg = f"Error: {loop_err} {str(sys.exc_info()[0])} {str(sys.exc_info()[1])} {traceback.format_exc()}"
+ log(err_msg, log_level=LogLevel.ERROR)
+ dispatch_notification(title=f"{param['current_filename']} {param['gateway_id']} error. {param['ticker']}", message=err_msg, footer=param['notification']['footer'], params=notification_params, log_level=LogLevel.ERROR, logger=logger)
+
+ finally:
+ time.sleep(int(param['loop_freq_ms']/1000))
+
+ asyncio.run(
+ main(
+ order_notional_adj_func=TargetStrategy.order_notional_adj,
+ allow_entry_initial_func=TargetStrategy.allow_entry_initial,
+ allow_entry_final_func=TargetStrategy.allow_entry_final,
+ sl_adj_func=TargetStrategy.sl_adj,
+ trailing_stop_threshold_eval_func=TargetStrategy.trailing_stop_threshold_eval,
+ tp_eval_func=TargetStrategy.tp_eval
+ )
+ )
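main() is driven entirely by the strategy hooks wired in here via the TargetStrategy object referenced above (defined or imported elsewhere in the module). Below is a hypothetical, minimal strategy class sketching only the hooks whose return shapes appear in this excerpt; signatures and return keys are inferred from how the executor consumes the results and may not match strategy_base.py exactly. Because of the locals() filtering shown earlier, a hook may only declare parameters that exist as locals inside the executor loop (mid, pnl_live_bps, this_ticker_open_trades, running_sl_percent_hard, target_order_notional, and so on).

class MyStrategy:
    ''' Hypothetical sketch. allow_entry_initial and trailing_stop_threshold_eval are
    wired the same way but their expected returns are not shown in this excerpt. '''

    @staticmethod
    def allow_entry_final(mid):
        # Decide direction and, optionally, a strategy-specific target price.
        return {'long': False, 'short': False,
                'target_price_long': None, 'target_price_short': None}

    @staticmethod
    def order_notional_adj(target_order_notional):
        # Scale the configured order size (pass-through here).
        return {'target_order_notional': target_order_notional}

    @staticmethod
    def sl_adj(running_sl_percent_hard):
        # Optionally tighten or loosen the hard stop over the life of the position.
        return {'running_sl_percent_hard': running_sl_percent_hard}

    @staticmethod
    def tp_eval(mid, this_ticker_open_trades):
        # Simplified: ignores side; fires once mid reaches any open trade's target.
        return any(mid >= trade['target_price'] for trade in this_ticker_open_trades)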