siglab-py 0.2.8__tar.gz → 0.3.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of siglab-py has been flagged as possibly problematic by the registry.
- {siglab_py-0.2.8 → siglab_py-0.3.0}/PKG-INFO +1 -1
- {siglab_py-0.2.8 → siglab_py-0.3.0}/pyproject.toml +1 -1
- {siglab_py-0.2.8 → siglab_py-0.3.0}/setup.cfg +1 -1
- {siglab_py-0.2.8 → siglab_py-0.3.0}/siglab_py/ordergateway/gateway.py +91 -29
- {siglab_py-0.2.8 → siglab_py-0.3.0}/siglab_py/tests/integration/market_data_util_tests.py +1 -1
- {siglab_py-0.2.8 → siglab_py-0.3.0}/siglab_py/tests/unit/analytic_util_tests.py +3 -1
- siglab_py-0.3.0/siglab_py/tests/unit/trading_util_tests.py +60 -0
- {siglab_py-0.2.8 → siglab_py-0.3.0}/siglab_py/util/analytic_util.py +16 -0
- {siglab_py-0.2.8 → siglab_py-0.3.0}/siglab_py/util/notification_util.py +18 -9
- {siglab_py-0.2.8 → siglab_py-0.3.0}/siglab_py/util/slack_notification_util.py +9 -5
- siglab_py-0.3.0/siglab_py/util/trading_util.py +66 -0
- {siglab_py-0.2.8 → siglab_py-0.3.0}/siglab_py.egg-info/PKG-INFO +1 -1
- {siglab_py-0.2.8 → siglab_py-0.3.0}/siglab_py.egg-info/SOURCES.txt +3 -1
- {siglab_py-0.2.8 → siglab_py-0.3.0}/siglab_py/__init__.py +0 -0
- {siglab_py-0.2.8 → siglab_py-0.3.0}/siglab_py/constants.py +0 -0
- {siglab_py-0.2.8 → siglab_py-0.3.0}/siglab_py/exchanges/__init__.py +0 -0
- {siglab_py-0.2.8 → siglab_py-0.3.0}/siglab_py/exchanges/any_exchange.py +0 -0
- {siglab_py-0.2.8 → siglab_py-0.3.0}/siglab_py/exchanges/futubull.py +0 -0
- {siglab_py-0.2.8 → siglab_py-0.3.0}/siglab_py/market_data_providers/__init__.py +0 -0
- {siglab_py-0.2.8 → siglab_py-0.3.0}/siglab_py/market_data_providers/aggregated_orderbook_provider.py +0 -0
- {siglab_py-0.2.8 → siglab_py-0.3.0}/siglab_py/market_data_providers/candles_provider.py +0 -0
- {siglab_py-0.2.8 → siglab_py-0.3.0}/siglab_py/market_data_providers/candles_ta_provider.py +0 -0
- {siglab_py-0.2.8 → siglab_py-0.3.0}/siglab_py/market_data_providers/ccxt_candles_ta_to_csv.py +0 -0
- {siglab_py-0.2.8 → siglab_py-0.3.0}/siglab_py/market_data_providers/deribit_options_expiry_provider.py +0 -0
- {siglab_py-0.2.8 → siglab_py-0.3.0}/siglab_py/market_data_providers/futu_candles_ta_to_csv.py +0 -0
- {siglab_py-0.2.8 → siglab_py-0.3.0}/siglab_py/market_data_providers/orderbooks_provider.py +0 -0
- {siglab_py-0.2.8 → siglab_py-0.3.0}/siglab_py/market_data_providers/test_provider.py +0 -0
- {siglab_py-0.2.8 → siglab_py-0.3.0}/siglab_py/ordergateway/__init__.py +0 -0
- {siglab_py-0.2.8 → siglab_py-0.3.0}/siglab_py/ordergateway/client.py +0 -0
- {siglab_py-0.2.8 → siglab_py-0.3.0}/siglab_py/ordergateway/encrypt_keys_util.py +0 -0
- {siglab_py-0.2.8 → siglab_py-0.3.0}/siglab_py/ordergateway/test_ordergateway.py +0 -0
- {siglab_py-0.2.8 → siglab_py-0.3.0}/siglab_py/tests/__init__.py +0 -0
- {siglab_py-0.2.8 → siglab_py-0.3.0}/siglab_py/tests/integration/__init__.py +0 -0
- {siglab_py-0.2.8 → siglab_py-0.3.0}/siglab_py/tests/unit/__init__.py +0 -0
- {siglab_py-0.2.8 → siglab_py-0.3.0}/siglab_py/tests/unit/market_data_util_tests.py +0 -0
- {siglab_py-0.2.8 → siglab_py-0.3.0}/siglab_py/util/__init__.py +0 -0
- {siglab_py-0.2.8 → siglab_py-0.3.0}/siglab_py/util/aws_util.py +0 -0
- {siglab_py-0.2.8 → siglab_py-0.3.0}/siglab_py/util/market_data_util.py +0 -0
- {siglab_py-0.2.8 → siglab_py-0.3.0}/siglab_py/util/retry_util.py +0 -0
- {siglab_py-0.2.8 → siglab_py-0.3.0}/siglab_py.egg-info/dependency_links.txt +0 -0
- {siglab_py-0.2.8 → siglab_py-0.3.0}/siglab_py.egg-info/requires.txt +0 -0
- {siglab_py-0.2.8 → siglab_py-0.3.0}/siglab_py.egg-info/top_level.txt +0 -0
{siglab_py-0.2.8 → siglab_py-0.3.0}/pyproject.toml

@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "siglab_py"
-version = "0.2.8"
+version = "0.3.0"
 description = "Market data fetches, TA calculations and generic order gateway."
 authors = [{name = "r0bbarh00d", email = "r0bbarh00d@gmail.com"}]
 license = {text = "MIT"}
{siglab_py-0.2.8 → siglab_py-0.3.0}/siglab_py/ordergateway/gateway.py

@@ -25,8 +25,10 @@ import ccxt.pro as ccxtpro
 
 from siglab_py.exchanges.any_exchange import AnyExchange
 from siglab_py.ordergateway.client import Order, DivisiblePosition
-from siglab_py.util import notification_util
 from siglab_py.constants import LogLevel # type: ignore
+from util.notification_util import dispatch_notification
+
+current_filename = os.path.basename(__file__)
 
 '''
 Usage:

@@ -114,7 +116,7 @@ To debug from vscode, launch.json:
         "order_type": "limit",
         "leg_room_bps": 5,
         "slices": 5,
-        "wait_fill_threshold_ms":
+        "wait_fill_threshold_ms": 5000,
         "executions": {},
         "filled_amount": 0,
         "average_cost": 0

@@ -132,7 +134,7 @@ To debug from vscode, launch.json:
         "order_type": "limit",
         "leg_room_bps": 5,
         "slices": 5,
-        "wait_fill_threshold_ms":
+        "wait_fill_threshold_ms": 5000,
         "executions": {
             "xxx": { <-- order id from exchange
                 "info": { <-- ccxt convention, raw response from exchanges under info tag

@@ -199,6 +201,9 @@ param : Dict = {
     "default_fees_ccy" : None,
     "loop_freq_ms" : 500, # reduce this if you need trade faster
     "loops_random_delay_multiplier" : 1, # Add randomness to time between slices are sent off. Set to 1 if no random delay needed.
+    "wait_fill_threshold_ms" : 5000,
+
+    'current_filename' : current_filename,
 
     'notification' : {
         'footer' : None,

@@ -284,6 +289,7 @@ def parse_args():
 
     parser.add_argument("--default_fees_ccy", help="If you're trading crypto, CEX fees USDT, DEX fees USDC in many cases. Default None, in which case gateway won't aggregatge fees from executions for you.", default=None)
     parser.add_argument("--loop_freq_ms", help="Loop delays. Reduce this if you want to trade faster.", default=500)
+    parser.add_argument("--wait_fill_threshold_ms", help="Wait for fills for how long?", default=5000)
 
     parser.add_argument("--encrypt_decrypt_with_aws_kms", help="Y or N. If encrypt_decrypt_with_aws_kms=N, pass in apikey, secret and passphrase unencrypted (Not recommended, for testing only). If Y, they will be decrypted using AMS KMS key.", default='N')
     parser.add_argument("--aws_kms_key_id", help="AWS KMS key ID", default=None)

@@ -310,6 +316,7 @@ def parse_args():
     param['rate_limit_ms'] = int(args.rate_limit_ms)
     param['default_fees_ccy'] = args.default_fees_ccy
     param['loop_freq_ms'] = int(args.loop_freq_ms)
+    param['wait_fill_threshold_ms'] = int(args.wait_fill_threshold_ms)
 
     if args.encrypt_decrypt_with_aws_kms:
         if args.encrypt_decrypt_with_aws_kms=='Y':

@@ -327,7 +334,8 @@ def parse_args():
     param['notification']['slack']['info']['webhook_url'] = args.slack_info_url
     param['notification']['slack']['critical']['webhook_url'] = args.slack_critial_url
     param['notification']['slack']['alert']['webhook_url'] = args.slack_alert_url
-
+
+    param['notification']['footer'] = f"From {param['current_filename']} {param['gateway_id']}"
 
 def init_redis_client() -> StrictRedis:
     redis_client : StrictRedis = StrictRedis(

@@ -451,11 +459,11 @@ async def watch_orders_task(
             order_id = order_update['id']
             executions[order_id] = order_update
 
-            log(f"order updates: {order_updates}", log_level=LogLevel.INFO)
+            log(f"order updates: {json.dumps(order_updates, indent=4)}", log_level=LogLevel.INFO)
         except Exception as loop_err:
             print(f"watch_orders_task error: {loop_err}")
 
-        await asyncio.sleep(
+        await asyncio.sleep(param['loop_freq_ms']/1000)
 
 async def send_heartbeat(exchange):
 

@@ -489,10 +497,25 @@ async def execute_one_position(
     multiplier = market['contractSize'] if 'contractSize' in market and market['contractSize'] else 1
     position.multiplier = multiplier
 
+    log(f"{position.ticker} min_amount: {min_amount}, multiplier: {multiplier}")
+
     slices : List[Order] = position.to_slices()
+
+    # Residual handling in last slice
+    last_slice = slices[-1]
+    last_slice_rounded_amount_in_base_ccy = exchange.amount_to_precision(position.ticker, last_slice.amount/multiplier) # After divided by multiplier, rounded_slice_amount_in_base_ccy in number of contracts actually (Not in base ccy).
+    last_slice_rounded_amount_in_base_ccy = float(last_slice_rounded_amount_in_base_ccy) if last_slice_rounded_amount_in_base_ccy else 0
+    if last_slice_rounded_amount_in_base_ccy<=min_amount:
+        slices.pop()
+        slices[-1].amount += last_slice.amount
+
+        log(f"{position.ticker} Last slice residual smaller than min_amount. Amount is added to prev slice instead. last_slice_amount: {last_slice.amount/multiplier}, last_slice_rounded_amount: {last_slice_rounded_amount_in_base_ccy}")
+
     i = 0
     for slice in slices:
         try:
+            log(f"{position.ticker} sending slice# {i}")
+
             dt_now : datetime = datetime.now()
 
             slice_amount_in_base_ccy : float = slice.amount

@@ -620,9 +643,9 @@ async def execute_one_position(
             log(f"Order dispatched: {order_id}. status: {order_status}, filled_amount: {filled_amount}, remaining_amount: {remaining_amount}")
 
             if not order_status or order_status!='closed':
-                start_time = time.time()
                 wait_threshold_sec = position.wait_fill_threshold_ms / 1000
-
+
+                start_time = time.time()
                 elapsed_sec = time.time() - start_time
                 while elapsed_sec < wait_threshold_sec:
                     order_update = None

@@ -637,12 +660,15 @@ async def execute_one_position(
                         position.append_execution(order_id, order_update)
 
                         if remaining_amount <= 0:
-                            log(f"Limit order fully filled: {order_id}", log_level=LogLevel.INFO)
+                            log(f"Limit order fully filled: {order_id}, order_update: {json.dumps(order_update, indent=4)}", log_level=LogLevel.INFO)
                             break
 
                     loops_random_delay_multiplier : int = random.randint(1, param['loops_random_delay_multiplier']) if param['loops_random_delay_multiplier']!=1 else 1
-                    loop_freq_sec : int =
+                    loop_freq_sec : int = max(1, param['loop_freq_ms']/1000)
                     await asyncio.sleep(loop_freq_sec * loops_random_delay_multiplier)
+
+                    elapsed_sec = time.time() - start_time
+                    log(f"{position.ticker} waiting for order update ... elapsed_sec: {elapsed_sec}")
 
 
                 # Cancel hung limit order, resend as market

@@ -653,13 +679,11 @@ async def execute_one_position(
                 filled_amount = order_update['filled']
                 remaining_amount = order_update['remaining']
                 order_update['multiplier'] = multiplier
-                position.append_execution(order_id, order_update)
 
-
-                order_status = order_update['status']
-                filled_amount = order_update['filled']
-                remaining_amount = order_update['remaining']
+                position.append_execution(order_id, order_update)
 
+                if order_status!='closed':
+                    log(f"Final order_update before cancel+resend: {json.dumps(order_update, indent=4)}", log_level=LogLevel.INFO)
                     await exchange.cancel_order(order_id, position.ticker) # type: ignore
                     position.get_execution(order_id)['status'] = 'canceled'
                     log(f"Canceled unfilled/partial filled order: {order_id}. Resending remaining_amount: {remaining_amount} as market order.", log_level=LogLevel.INFO)

@@ -680,7 +704,11 @@ async def execute_one_position(
                     executed_resent_order['multiplier'] = multiplier
                     position.append_execution(order_id, executed_resent_order)
 
-
+                    wait_threshold_sec = position.wait_fill_threshold_ms / 1000
+
+                    start_time = time.time()
+                    elapsed_sec = time.time() - start_time
+                    while (not order_status or order_status!='closed') and (elapsed_sec < wait_threshold_sec):
                         order_update = None
                         if order_id in executions:
                             order_update = executions[order_id]

@@ -691,11 +719,24 @@ async def execute_one_position(
                             filled_amount = order_update['filled']
                             remaining_amount = order_update['remaining']
 
-
+                        elapsed_sec = time.time() - start_time
+                        log(f"Waiting for resent market order to close {order_id} ... elapsed_sec: {elapsed_sec}")
 
-
+                        loops_random_delay_multiplier : int = random.randint(1, param['loops_random_delay_multiplier']) if param['loops_random_delay_multiplier']!=1 else 1
+                        loop_freq_sec : int = max(1, param['loop_freq_ms']/1000)
+                        await asyncio.sleep(loop_freq_sec * loops_random_delay_multiplier)
 
-
+                    if (not order_status or order_status!='closed'):
+                        # If no update from websocket, do one last fetch via REST
+                        order_update = await exchange.fetch_order(order_id, position.ticker) # type: ignore
+                        order_status = order_update['status']
+                        filled_amount = order_update['filled']
+                        remaining_amount = order_update['remaining']
+                        order_update['multiplier'] = multiplier
+
+                        log(f"Resent market order{order_id} filled. status: {order_status}, filled_amount: {filled_amount}, remaining_amount: {remaining_amount} {json.dumps(order_update, indent=4)}")
+                    else:
+                        log(f"{position.ticker} {order_id} status (From REST): {json.dumps(order_update, indent=4)}")
 
             slice.dispatched_price = rounded_limit_price
             slice.dispatched_amount = rounded_slice_amount_in_base_ccy

@@ -714,9 +755,16 @@ async def execute_one_position(
                 )
                 raise slice_err
             finally:
+                log(f"{position.ticker} done slice# {i}")
                 i += 1
-
+
+        log(f"{position.ticker} patch_executions")
         position.patch_executions()
+
+        log(f"Dispatched slices:")
+        for dispatched_slice in position.dispatched_slices:
+            log(f"{json.dumps(dispatched_slice.to_dict(), indent=4)}")
+
         position.filled_amount = position.get_filled_amount()
         position.average_cost = position.get_average_cost()
         position.fees = position.get_fees()

@@ -734,14 +782,28 @@ async def execute_one_position(
         position.done = True
 
         log(f"Executions:")
-        log(f"{json.dumps(position.
-
-
+        log(f"{json.dumps(position.to_dict(), indent=4)}")
+
+        notification_summary = {
+            'ticker' : position.ticker,
+            'side' : position.side,
+            'num_executions' : len(position.get_executions()),
+            'filled_amount' : position.filled_amount,
+            'average_cost' : position.average_cost,
+            'pos' : position.pos,
+            'done' : position.done
+        }
+        dispatch_notification(title=f"{param['current_filename']} {param['gateway_id']} execute_one_position done. {position.ticker} {position.side} {position.amount}", message=notification_summary, footer=param['notification']['footer'], params=notification_params, log_level=LogLevel.CRITICAL, logger=logger)
 
     except Exception as position_execution_err:
-
-
+        err_msg = f"Execution failed: {position_execution_err} {str(sys.exc_info()[0])} {str(sys.exc_info()[1])} {traceback.format_exc()}"
+        log(f"Execution failed: {err_msg}")
 
+        dispatch_notification(title=f"{param['current_filename']} {param['gateway_id']} execute_one_position failed!!! {position.ticker} {position.side} {position.amount}", message=position.get_executions(), footer=param['notification']['footer'], params=notification_params, log_level=LogLevel.ERROR, logger=logger) # type: ignore
+
+        position.done = False
+        position.execution_err = err_msg
+
 async def work(
     param : Dict,
     exchange : AnyExchange,

@@ -788,7 +850,7 @@ async def work(
             reduce_only=order['reduce_only'],
             fees_ccy=order['fees_ccy'] if 'fees_ccy' in order else param['default_fees_ccy'],
             slices=order['slices'],
-            wait_fill_threshold_ms=order['wait_fill_threshold_ms']
+            wait_fill_threshold_ms=order['wait_fill_threshold_ms'] if order['wait_fill_threshold_ms']>0 else param['wait_fill_threshold_ms']
         )
         for order in orders
     ]

@@ -800,7 +862,7 @@ async def work(
 
     i = 0
    for position in positions:
-        log(f"{i} {position.ticker}, {position.side} # executions: {len(position.get_executions())}, filled_amount: {position.filled_amount}, average_cost: {position.average_cost}, pos: {position.pos}, order_dispatch_elapsed_ms: {order_dispatch_elapsed_ms}")
+        log(f"{i} {position.ticker}, {position.side} # executions: {len(position.get_executions())}, filled_amount: {position.filled_amount}, average_cost: {position.average_cost}, pos: {position.pos}, done: {position.done}, error: {position.execution_err}, order_dispatch_elapsed_ms: {order_dispatch_elapsed_ms}")
         i += 1
 
     start = time.time()

@@ -826,7 +888,7 @@ async def work(
         except Exception as loop_error:
             log(f"Error: {loop_error} {str(sys.exc_info()[0])} {str(sys.exc_info()[1])} {traceback.format_exc()}")
         finally:
-            await asyncio.sleep(
+            await asyncio.sleep(param['loop_freq_ms']/1000)
 
 async def main():
     parse_args()

@@ -880,7 +942,7 @@ async def main():
     # Once exchange instantiated, try fetch_balance to confirm connectivity and test credentials.
     balances = await exchange.fetch_balance() # type: ignore
     log(f"{param['gateway_id']}: account balances {balances}")
-
+    dispatch_notification(title=f"{param['current_filename']} {param['gateway_id']} started", message=balances, footer=param['notification']['footer'], params=notification_params, log_level=LogLevel.CRITICAL, logger=logger)
 
     await work(param=param, exchange=exchange, redis_client=redis_client, notification_params=notification_params)
 
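The residual handling added to execute_one_position folds an undersized final slice back into the previous one before anything is dispatched. A minimal standalone sketch of that rule, with slices reduced to plain floats; the helper name and signature are illustrative, not part of the package:

from typing import List

def merge_residual_slice(slice_amounts: List[float], min_amount: float, multiplier: float = 1.0) -> List[float]:
    # If the last slice, expressed in contracts (amount / multiplier), is at or below
    # the exchange's minimum order size, drop it and add its amount to the previous
    # slice so no unfillable residual order is sent.
    if len(slice_amounts) >= 2 and (slice_amounts[-1] / multiplier) <= min_amount:
        *rest, last = slice_amounts
        rest[-1] += last
        return rest
    return slice_amounts

print(merge_residual_slice([0.8, 0.8, 0.8, 0.8, 0.8], min_amount=1))  # [0.8, 0.8, 0.8, 1.6]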
{siglab_py-0.2.8 → siglab_py-0.3.0}/siglab_py/tests/integration/market_data_util_tests.py

@@ -100,7 +100,7 @@ class MarketDataUtilTests(unittest.TestCase):
             'defaultType': 'swap' }
         }
 
-        exchange : Exchange = okx(param)
+        exchange : Exchange = okx(param) # type: ignore
         normalized_symbols = [ 'BTC/USDT:USDT' ]
         pd_candles: Union[pd.DataFrame, None] = fetch_candles(
             start_ts=start_date.timestamp(),
{siglab_py-0.2.8 → siglab_py-0.3.0}/siglab_py/tests/unit/analytic_util_tests.py

@@ -49,7 +49,9 @@ class AnalyticUtilTests(unittest.TestCase):
             'week_of_month', 'apac_trading_hr', 'emea_trading_hr', 'amer_trading_hr',
             'is_green', 'pct_change_close',
             'sma_short_periods', 'sma_long_periods', 'ema_short_periods', 'ema_long_periods', 'ema_close',
-            'std', 'std_percent',
+            'std', 'std_percent',
+            'candle_height_percent', 'candle_height_percent_rounded',
+            'log_return', 'interval_historical_volatility',
             'chop_against_ema',
             'ema_volume_short_periods', 'ema_volume_long_periods',
             'max_short_periods', 'max_long_periods', 'idmax_short_periods', 'idmax_long_periods', 'min_short_periods', 'min_long_periods', 'idmin_short_periods', 'idmin_long_periods',
siglab_py-0.3.0/siglab_py/tests/unit/trading_util_tests.py

@@ -0,0 +1,60 @@
+import unittest
+from datetime import datetime, timedelta
+from typing import Union
+from pathlib import Path
+
+from util.trading_util import *
+
+
+# @unittest.skip("Skip all integration tests.")
+class TradingUtilTests(unittest.TestCase):
+    def test_timestamp_to_active_trading_regions_case1(self):
+        tp_min_percent : float = 1.5
+        tp_max_percent : float = 2.5
+        sl_percent_trailing : float = 50 # Trailing stop loss in percent
+        default_effective_sl_percent_trailing : float = 50
+
+        pnl_percent_notional : float = 0.5 # Trade's current pnl in percent.
+
+        effective_tp_trailing_percent = calc_eff_trailing_sl(
+            tp_min_percent = tp_min_percent,
+            tp_max_percent = tp_max_percent,
+            sl_percent_trailing = sl_percent_trailing,
+            pnl_percent_notional = pnl_percent_notional,
+            default_effective_sl_percent_trailing = default_effective_sl_percent_trailing
+        )
+        assert(effective_tp_trailing_percent==50) # Generous trailing SL when trading starting out and pnl small.
+
+    def test_timestamp_to_active_trading_regions_case2(self):
+        tp_min_percent : float = 1.5
+        tp_max_percent : float = 2.5
+        sl_percent_trailing : float = 50 # Trailing stop loss in percent
+        default_effective_sl_percent_trailing : float = 50
+
+        pnl_percent_notional : float = 2 # Trade's current pnl in percent.
+
+        effective_tp_trailing_percent = calc_eff_trailing_sl(
+            tp_min_percent = tp_min_percent,
+            tp_max_percent = tp_max_percent,
+            sl_percent_trailing = sl_percent_trailing,
+            pnl_percent_notional = pnl_percent_notional,
+            default_effective_sl_percent_trailing = default_effective_sl_percent_trailing
+        )
+        assert(effective_tp_trailing_percent==25) # Intermediate trailing SL
+
+    def test_timestamp_to_active_trading_regions_case3(self):
+        tp_min_percent : float = 1.5
+        tp_max_percent : float = 2.5
+        sl_percent_trailing : float = 50 # Trailing stop loss in percent
+        default_effective_sl_percent_trailing : float = 50
+
+        pnl_percent_notional : float = 2.5 # Trade's current pnl in percent.
+
+        effective_tp_trailing_percent = calc_eff_trailing_sl(
+            tp_min_percent = tp_min_percent,
+            tp_max_percent = tp_max_percent,
+            sl_percent_trailing = sl_percent_trailing,
+            pnl_percent_notional = pnl_percent_notional,
+            default_effective_sl_percent_trailing = default_effective_sl_percent_trailing
+        )
+        assert(effective_tp_trailing_percent==0) # Most tight trailing SL
{siglab_py-0.2.8 → siglab_py-0.3.0}/siglab_py/util/analytic_util.py

@@ -101,9 +101,25 @@ def compute_candles_stats(
     pd_candles['std'] = pd_candles['close'].rolling(window=sliding_window_how_many_candles).std()
 
     pd_candles['std_percent'] = pd_candles['std'] / pd_candles['ema_close'] * 100
+
     pd_candles['candle_height_percent'] = pd_candles['candle_height'] / pd_candles['ema_close'] * 100
     pd_candles['candle_height_percent_rounded'] = pd_candles['candle_height_percent'].round().astype('Int64')
 
+    '''
+    To annualize volatility:
+        if candle_interval == '1m':
+            annualization_factor = np.sqrt(365 * 24 * 60) # 1-minute candles
+        elif candle_interval == '1h':
+            annualization_factor = np.sqrt(365 * 24) # 1-hour candles
+        elif candle_interval == '1d':
+            annualization_factor = np.sqrt(365) # 1-day candles
+        pd_candles['annualized_volatility'] = (
+            pd_candles['interval_historical_volatility'] * annualization_factor
+        )
+    '''
+    pd_candles['log_return'] = np.log(pd_candles['close'] / pd_candles['close'].shift(1))
+    pd_candles['interval_historical_volatility'] = pd_candles['log_return'].rolling(window=sliding_window_how_many_candles).std()
+
     pd_candles['chop_against_ema'] = (
         (~pd_candles['is_green'] & (pd_candles['close'] > pd_candles['ema_close'])) | # Case 1: Green candle and close > EMA
         (pd_candles['is_green'] & (pd_candles['close'] < pd_candles['ema_close'])) # Case 2: Red candle and close < EMA
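The commented-out block above shows how the new interval_historical_volatility column could be annualized. A minimal helper along those lines, assuming the DataFrame produced by compute_candles_stats and a candle_interval string; the function name is illustrative, not part of the package:

import numpy as np
import pandas as pd

def annualize_volatility(pd_candles: pd.DataFrame, candle_interval: str) -> pd.DataFrame:
    # Square-root-of-time scaling, using the factors from the comment above.
    factors = {
        '1m': np.sqrt(365 * 24 * 60),  # 1-minute candles
        '1h': np.sqrt(365 * 24),       # 1-hour candles
        '1d': np.sqrt(365),            # 1-day candles
    }
    pd_candles['annualized_volatility'] = (
        pd_candles['interval_historical_volatility'] * factors[candle_interval]
    )
    return pd_candles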
{siglab_py-0.2.8 → siglab_py-0.3.0}/siglab_py/util/notification_util.py

@@ -1,10 +1,11 @@
 import json
 from typing import Any, Dict, Union
+from datetime import datetime, timezone
 import pandas as pd
 import numpy as np
 from tabulate import tabulate
 
-from
+from util.slack_notification_util import slack_dispatch_notification
 
 from siglab_py.constants import LogLevel
 

@@ -13,16 +14,24 @@ def dispatch_notification(
     message : Union[str, Dict, pd.DataFrame],
     footer : str,
     params : Dict[str, Any],
-    log_level : LogLevel = LogLevel.INFO
+    log_level : LogLevel = LogLevel.INFO,
+    logger = None
 ):
-
-
-
-
-
-
+    try:
+        if isinstance(message, Dict):
+            _message = json.dumps(message, indent=2, separators=(' ', ':'))
+        elif isinstance(message, pd.DataFrame):
+            _message = tabulate(message, headers='keys', tablefmt='orgtbl') # type: ignore
+        else:
+            _message = message
 
-
+        utc_time = datetime.now(timezone.utc)
+        footer = f"UTC {utc_time} {footer}"
+
+        slack_dispatch_notification(title, _message, footer, params, log_level)
+    except Exception as any_notification_error:
+        if logger:
+            logger.info(f"Failed to dispatch notification: {any_notification_error}")
 
 if __name__ == '__main__':
     params : Dict[str, Any] = {
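A short usage sketch of the updated dispatch_notification signature, with the import path as used in gateway.py and the params layout following the slack/info/critical/alert structure shown there; the webhook URLs are placeholders:

import logging
from siglab_py.constants import LogLevel
from util.notification_util import dispatch_notification

logger = logging.getLogger(__name__)

notification_params = {
    'slack': {
        'info':     {'webhook_url': 'https://hooks.slack.com/services/...'},   # placeholder
        'critical': {'webhook_url': 'https://hooks.slack.com/services/...'},   # placeholder
        'alert':    {'webhook_url': 'https://hooks.slack.com/services/...'},   # placeholder
    }
}

dispatch_notification(
    title="gateway started",
    message={'status': 'ok'},           # dicts are json.dumps'ed, DataFrames tabulated
    footer="From gateway.py my_gateway_id",
    params=notification_params,
    log_level=LogLevel.INFO,            # INFO/DEBUG route to the 'info' webhook
    logger=logger,                      # new in 0.3.0: failures are logged, not raised
)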
{siglab_py-0.2.8 → siglab_py-0.3.0}/siglab_py/util/slack_notification_util.py

@@ -13,15 +13,19 @@ def slack_dispatch_notification(
     message : str,
     footer : str,
     params : Dict[str, Any],
-    log_level : LogLevel = LogLevel.INFO
+    log_level : LogLevel = LogLevel.INFO,
+    max_message_len : int = 1800
 ):
     slack_params = params['slack']
 
-
+    # Slack slack ... https://stackoverflow.com/questions/60344831/slack-api-invalid-block
+    message = message[:max_message_len]
+
+    if log_level.value==LogLevel.INFO.value or log_level.value==LogLevel.DEBUG.value:
         webhook_url = slack_params['info']['webhook_url']
-    elif log_level==LogLevel.CRITICAL:
+    elif log_level.value==LogLevel.CRITICAL.value:
         webhook_url = slack_params['critical']['webhook_url']
-    elif log_level==LogLevel.ERROR:
+    elif log_level.value==LogLevel.ERROR.value:
         webhook_url = slack_params['alert']['webhook_url']
     else:
         webhook_url = slack_params['info']['webhook_url']

@@ -30,7 +34,7 @@ def slack_dispatch_notification(
     return
 
     data = {
-        "username": "
+        "username": "siglab_py",
         "type": "section",
         "blocks": [
             {
siglab_py-0.3.0/siglab_py/util/trading_util.py

@@ -0,0 +1,66 @@
+
+'''
+pnl_percent_notional = Trade's current pnl in percent.
+
+Examples,
+    y-axis:
+        max (i.e most tight) = 0%
+        sl_percent_trailing = 50% (Trailing stop loss in percent)
+
+    x-axis:
+        min TP = 1.5% <-- min TP
+        max TP = 2.5% <-- max TP
+
+    slope = (0-50)/(2.5-1.5) = -50
+    effective_tp_trailing_percent = slope * (pnl_percent_notional - 1.5%) + sl_percent_trailing
+
+    Case 1. pnl_percent_notional = 0.5% (Trade starting off, only +50bps pnl. i.e. min TP)
+        effective_tp_trailing_percent = slope * (pnl_percent_notional - 0.5%) + sl_percent_trailing
+            = -50 * (1.5-1.5) + 50%
+            = 0 + 50
+            = 50% (Most loose)
+
+    Case 2. pnl_percent_notional = 2% (Deeper into profit, +200bps pnl)
+        effective_tp_trailing_percent = slope * (pnl_percent_notional - 1.5%) + sl_percent_trailing
+            = -50 * (2-1.5) +50%
+            = -25 + 50
+            = 25% (Somewhat tight)
+
+    Case 3. pnl_percent_notional = 2.5% (Very deep in profit, +250bps pnl. i.e. max TP)
+        effective_tp_trailing_percent = slope * (pnl_percent_notional - 1.5%) + sl_percent_trailing
+            = -50 * (2.5-1.5) +50%
+            = -50 + 50
+            = 0 (Most tight)
+
+So you see, effective_tp_trailing_percent gets smaller and smaller as pnl approach max TP, finally zero.
+
+How to use it?
+    if loss_trailing>=effective_tp_trailing_percent and pnl_percent_notional > tp_min_percent:
+        Fire trailing stops and take profit.
+
+What's 'loss_trailing'? 'loss_trailing' is essentially pnl drop from max_unrealized_pnl_live.
+
+Say, when trade started off:
+    unrealized_pnl_live = $80
+    max_unrealized_pnl_live = $100
+    loss_trailing = (1 - unrealized_pnl_live/max_unrealized_pnl_live) = (1-80/100) = 0.2 (Or 20%)
+
+If pnl worsen:
+    unrealized_pnl_live = $40
+    max_unrealized_pnl_live = $100
+    loss_trailing = (1 - unrealized_pnl_live/max_unrealized_pnl_live) = (1-40/100) = 0.6 (Or 60%)
+'''
+def calc_eff_trailing_sl(
+    tp_min_percent : float,
+    tp_max_percent : float,
+    sl_percent_trailing : float,
+    pnl_percent_notional : float,
+    default_effective_sl_percent_trailing : float = 50
+) -> float:
+    slope = (0 - sl_percent_trailing) / (tp_max_percent - tp_min_percent)
+    effective_sl_percent_trailing = (
+        slope * (pnl_percent_notional - tp_min_percent) + sl_percent_trailing
+        if pnl_percent_notional>tp_min_percent
+        else default_effective_sl_percent_trailing
+    )
+    return effective_sl_percent_trailing
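The three worked cases in the docstring line up with the new unit tests. A quick check, assuming util.trading_util is importable the same way trading_util_tests.py imports it:

from util.trading_util import calc_eff_trailing_sl

for pnl in (0.5, 2.0, 2.5):
    eff = calc_eff_trailing_sl(
        tp_min_percent=1.5,
        tp_max_percent=2.5,
        sl_percent_trailing=50,
        pnl_percent_notional=pnl,
        default_effective_sl_percent_trailing=50,
    )
    print(pnl, eff)  # 0.5 -> 50 (loosest), 2.0 -> 25, 2.5 -> 0 (tightest)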
{siglab_py-0.2.8 → siglab_py-0.3.0}/siglab_py.egg-info/SOURCES.txt

@@ -30,10 +30,12 @@ siglab_py/tests/integration/market_data_util_tests.py
 siglab_py/tests/unit/__init__.py
 siglab_py/tests/unit/analytic_util_tests.py
 siglab_py/tests/unit/market_data_util_tests.py
+siglab_py/tests/unit/trading_util_tests.py
 siglab_py/util/__init__.py
 siglab_py/util/analytic_util.py
 siglab_py/util/aws_util.py
 siglab_py/util/market_data_util.py
 siglab_py/util/notification_util.py
 siglab_py/util/retry_util.py
-siglab_py/util/slack_notification_util.py
+siglab_py/util/slack_notification_util.py
+siglab_py/util/trading_util.py
All remaining files listed above with +0 -0 are unchanged; only their path prefix was renamed from siglab_py-0.2.8 to siglab_py-0.3.0.