siglab-py 0.1.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of siglab-py might be problematic. Click here for more details.

Files changed (33) hide show
  1. siglab_py-0.1.0/PKG-INFO +35 -0
  2. siglab_py-0.1.0/pyproject.toml +39 -0
  3. siglab_py-0.1.0/setup.cfg +43 -0
  4. siglab_py-0.1.0/siglab_py/__init__.py +0 -0
  5. siglab_py-0.1.0/siglab_py/constants.py +3 -0
  6. siglab_py-0.1.0/siglab_py/exchanges/__init__.py +0 -0
  7. siglab_py-0.1.0/siglab_py/exchanges/any_exchange.py +20 -0
  8. siglab_py-0.1.0/siglab_py/market_data_providers/__init__.py +0 -0
  9. siglab_py-0.1.0/siglab_py/market_data_providers/aggregated_orderbook_provider.py +451 -0
  10. siglab_py-0.1.0/siglab_py/market_data_providers/candles_provider.py +342 -0
  11. siglab_py-0.1.0/siglab_py/market_data_providers/candles_ta_provider.py +263 -0
  12. siglab_py-0.1.0/siglab_py/market_data_providers/deribit_options_expiry_provider.py +197 -0
  13. siglab_py-0.1.0/siglab_py/market_data_providers/orderbooks_provider.py +359 -0
  14. siglab_py-0.1.0/siglab_py/market_data_providers/test_provider.py +70 -0
  15. siglab_py-0.1.0/siglab_py/ordergateway/__init__.py +0 -0
  16. siglab_py-0.1.0/siglab_py/ordergateway/client.py +137 -0
  17. siglab_py-0.1.0/siglab_py/ordergateway/encrypt_keys_util.py +43 -0
  18. siglab_py-0.1.0/siglab_py/ordergateway/gateway.py +658 -0
  19. siglab_py-0.1.0/siglab_py/ordergateway/test_ordergateway.py +140 -0
  20. siglab_py-0.1.0/siglab_py/tests/__init__.py +0 -0
  21. siglab_py-0.1.0/siglab_py/tests/integration/__init__.py +0 -0
  22. siglab_py-0.1.0/siglab_py/tests/integration/market_data_util_tests.py +123 -0
  23. siglab_py-0.1.0/siglab_py/tests/unit/__init__.py +0 -0
  24. siglab_py-0.1.0/siglab_py/util/__init__.py +0 -0
  25. siglab_py-0.1.0/siglab_py/util/analytic_util.py +792 -0
  26. siglab_py-0.1.0/siglab_py/util/aws_util.py +47 -0
  27. siglab_py-0.1.0/siglab_py/util/market_data_util.py +385 -0
  28. siglab_py-0.1.0/siglab_py/util/retry_util.py +15 -0
  29. siglab_py-0.1.0/siglab_py.egg-info/PKG-INFO +35 -0
  30. siglab_py-0.1.0/siglab_py.egg-info/SOURCES.txt +32 -0
  31. siglab_py-0.1.0/siglab_py.egg-info/dependency_links.txt +1 -0
  32. siglab_py-0.1.0/siglab_py.egg-info/requires.txt +22 -0
  33. siglab_py-0.1.0/siglab_py.egg-info/top_level.txt +1 -0
@@ -0,0 +1,35 @@
1
+ Metadata-Version: 2.1
2
+ Name: siglab_py
3
+ Version: 0.1.0
4
+ Summary: Market data fetches, TA calculations and generic order gateway.
5
+ Author: r0bbarh00d
6
+ Author-email: r0bbarh00d <r0bbarh00d@gmail.com>
7
+ License: MIT
8
+ Project-URL: Homepage, https://github.com/r0bbar/siglab/blob/master/siglab_py/README.md
9
+ Classifier: Programming Language :: Python :: 3
10
+ Classifier: License :: OSI Approved :: MIT License
11
+ Classifier: Operating System :: OS Independent
12
+ Requires-Python: >=3.9.19
13
+ Description-Content-Type: text/markdown
14
+ Requires-Dist: python-dotenv
15
+ Requires-Dist: dotmap
16
+ Requires-Dist: typing_extensions
17
+ Requires-Dist: arrow
18
+ Requires-Dist: tzlocal
19
+ Requires-Dist: nest_asyncio
20
+ Requires-Dist: pandas
21
+ Requires-Dist: numpy
22
+ Requires-Dist: boto3
23
+ Requires-Dist: web3
24
+ Requires-Dist: ccxt
25
+ Requires-Dist: ccxtpro
26
+ Requires-Dist: yfinance
27
+ Requires-Dist: yahoofinancials
28
+ Requires-Dist: scipy
29
+ Requires-Dist: statsmodels
30
+ Requires-Dist: scikit-learn
31
+ Requires-Dist: scikit-learn
32
+ Requires-Dist: hurst
33
+ Requires-Dist: redis
34
+ Requires-Dist: redis-py-cluster
35
+ Requires-Dist: kafka-python
@@ -0,0 +1,39 @@
1
+ [build-system]
2
+ requires = ["setuptools", "wheel"]
3
+ build-backend = "setuptools.build_meta"
4
+
5
+ [project]
6
+ name = "siglab_py"
7
+ version = "0.1.0"
8
+ description = "Market data fetches, TA calculations and generic order gateway."
9
+ authors = [{name = "r0bbarh00d", email = "r0bbarh00d@gmail.com"}]
10
+ license = {text = "MIT"}
11
+ readme = "README.md"
12
+ requires-python = ">=3.9.19"
13
+ dependencies = [
14
+ "python-dotenv",
15
+ "dotmap",
16
+ "typing_extensions",
17
+ "arrow",
18
+ "tzlocal",
19
+ "nest_asyncio",
20
+ "pandas",
21
+ "numpy",
22
+ "boto3",
23
+ "web3",
24
+ "ccxt",
25
+ "ccxtpro",
26
+ "yfinance",
27
+ "yahoofinancials",
28
+ "scipy",
29
+ "statsmodels",
30
+ "scikit-learn",
31
+ "scikit-learn",
32
+ "hurst",
33
+ "redis",
34
+ "redis-py-cluster",
35
+ "kafka-python",
36
+ ]
37
+
38
+ [project.urls]
39
+ Homepage = "https://github.com/r0bbar/siglab/blob/master/siglab_py/README.md"
@@ -0,0 +1,43 @@
1
+ [metadata]
2
+ name = siglab_py
3
+ version = 0.1.0
4
+ description = Market data fetches, TA calculations and generic order gateway.
5
+ author = r0bbarh00d
6
+ author_email = r0bbarh00d@gmail.com
7
+ license = MIT
8
+ classifiers =
9
+ Programming Language :: Python :: 3
10
+ License :: OSI Approved :: MIT License
11
+ Operating System :: OS Independent
12
+
13
+ [options]
14
+ packages = find:
15
+ python_requires = >=3.9.19
16
+ install_requires =
17
+ python-dotenv
18
+ dotmap
19
+ typing_extensions
20
+ arrow
21
+ tzlocal
22
+ nest_asyncio
23
+ pandas
24
+ numpy
25
+ boto3
26
+ web3
27
+ ccxt
28
+ ccxtpro
29
+ yfinance
30
+ yahoofinancials
31
+ scipy
32
+ statsmodels
33
+ scikit-learn
34
+ scikit-learn
35
+ hurst
36
+ redis
37
+ redis-py-cluster
38
+ kafka-python
39
+
40
+ [egg_info]
41
+ tag_build =
42
+ tag_date = 0
43
+
File without changes
@@ -0,0 +1,3 @@
from typing import Union, List, Dict, Any

# Alias covering every value shape the json module can round-trip:
# the JSON scalars (str/bool/int/float/None) plus arbitrarily-typed
# lists and dicts.
JSON_SERIALIZABLE_TYPES = Union[
    str,
    bool,
    int,
    float,
    None,
    List[Any],
    Dict[Any, Any],
]
File without changes
@@ -0,0 +1,20 @@
from ccxt.base.exchange import Exchange

'''
The idea here is, if we were to trade thru for example IBKR, which is a tradfi broker not supported by CCXT, we would implement IBKR like CCXT implement for crypto exchanges.
Concrete implementation will:
    a. Inherit from AnyExchange, thus ccxt Exchange class
    b. Override ccxt basic functions
        - load_markets
        - fetch_balance
        - create_order
        - update_order
        - cancel_order
        - fetch_order (REST) vs watch_orders (websocket)
        - order amount rounding: amount_to_precision
        - order price rounding: price_to_precision
        ... etc
'''
class AnyExchange(Exchange):
    '''
    Base class for adapters that expose non-CCXT venues (e.g. tradfi
    brokers such as IBKR) through the ccxt ``Exchange`` interface.

    Bug fix: the original subclassed ``Exception``, which contradicts the
    module docstring above ("Inherit from AnyExchange, thus ccxt Exchange
    class") and left the ``Exchange`` import unused — an exchange adapter
    must be an ``Exchange``, not an exception type.
    '''
    def __init__(self, *args: object) -> None:
        # Delegate to ccxt's Exchange initializer so the standard ccxt
        # plumbing (markets cache, options, precision helpers) is set up.
        super().__init__(*args)
@@ -0,0 +1,451 @@
1
+ from enum import Enum
2
+ import argparse
3
+ import time
4
+ from datetime import datetime
5
+ from typing import Any, Dict, Union
6
+ import logging
7
+ import json
8
+ from tabulate import tabulate
9
+ import asyncio
10
+ import pandas as pd
11
+ import numpy as np
12
+ from redis import StrictRedis
13
+
14
+ '''
15
+ From command prompt:
16
+ python aggregated_orderbook.py --normalized_symbol BTC/USDT:USDT --sliding_window_num_intervals 1200 --update_imabalce_csv_intervals 100 --dump_imbalance_to_disk Y --publish_imbalance_to_redis N
17
+ or
18
+ pypy aggregated_orderbook.py --normalized_symbol BTC/USDT:USDT --sliding_window_num_intervals 1200 --update_imabalce_csv_intervals 100 --dump_imbalance_to_disk Y --publish_imbalance_to_redis N
19
+
20
+ This script is pypy compatible.
21
+
22
+ Spot orderbooks REST API from exchanges
23
+ Binance https://binance-docs.github.io/apidocs/spot/en/#order-book
24
+ OKX https://www.okx.com/docs-v5/en/#order-book-trading-market-data-get-order-book
25
+ 'sz': Order book depth per side. Maximum 400, e.g. 400 bids + 400 asks
26
+ Bybit https://bybit-exchange.github.io/docs/v5/market/orderbook
27
+ Coinbase https://docs.cdp.coinbase.com/exchange/reference/exchangerestapi_getproductbook
28
+ Kraken https://docs.kraken.com/api/docs/rest-api/get-order-book/
29
+
30
+ Key parameters you may want to modify:
31
+ normalized_symbol: Which ticker you wish to monitor
32
+ sliding_window_num_intervals: We calc EMAs on pct_imbalance, bids_amount_usdt and asks_amount_usdt. Window size will impact this. It's quoted in # intervals.
33
+ It's measured in # intervals (or # loops), default 1200.
34
+ update_imabalce_csv_intervals: We'd update, or publish, imbalance data only at multiples of update_imabalce_csv_intervals.
35
+ It's measured in # intervals (or # loops), default 100.
36
+ topic_imbalance_data: Imbalance data is published to redis. This is published topic.
37
+ Since redis has special treatment for ':' (it'd consider it a folder), we'd replace ':' with '|'.
38
+ Example BTC/USDT:USDT will become BTC/USDT|USDT.
39
+ redis_ttl_ms: Imbalance data is published to redis. TTL of published data.
40
+ dump_imbalance_to_disk: Dump imbalance data to disk?
41
+ publish_imbalance_to_redis: Publish imbalance data to redis?
42
+
43
+ Launch.json if you wish to debug from VSCode:
44
+ {
45
+ "version": "0.2.0",
46
+ "configurations": [
47
+ {
48
+ "name": "Python Debugger: Current File",
49
+ "type": "debugpy",
50
+ "request": "launch",
51
+ "program": "${file}",
52
+ "console": "integratedTerminal",
53
+ "args" : [
54
+ "--normalized_symbol", "BTC/USDT:USDT",
55
+ "--sliding_window_num_intervals", "100",
56
+ "--update_imabalce_csv_intervals", "100",
57
+ "--topic_imbalance_data", "imbalance_BTC/USDT:USDT",
58
+ "--redis_ttl_ms", "3600000",
59
+
60
+ "--dump_imbalance_to_disk", "Y",
61
+ "--publish_imbalance_to_redis", "N"
62
+ ]
63
+ }
64
+ ]
65
+ }
66
+
67
+ '''
68
+ from ccxt.binance import binance
69
+ from ccxt.okx import okx
70
+ from ccxt.bybit import bybit
71
+ from ccxt.coinbase import coinbase
72
+ from ccxt.kraken import kraken
73
+ from ccxt.base.exchange import Exchange
74
+
75
# Runtime configuration for the aggregated-orderbook provider. Values here
# are defaults; several entries are overwritten by the CLI arguments parsed
# in main.parse_args().
param : Dict = {
    'normalized_symbol' : 'BTC/USDT:USDT',
    'market_type' : 'linear', # For spots, set to "spot". For perpetual, you need to look at ccxt doc, for most exchanges, it's 'linear' or 'swap' for perpetuals. Example, https://github.com/ccxt/ccxt/blob/master/python/ccxt/okx.py?plain=1#L1110
    'depth' : 1000,                        # Requested book depth; clamped per-exchange below.
    'price_level_increment' : 10,          # Bucket width (quote ccy) used to aggregate price levels.
    'sliding_window_num_intervals' : 1200, # For example if each iteration takes 2 sec. 90 intervals = 180 sec (i.e. three minutes)
    'update_imabalce_csv_intervals' : 100, # Publish/dump cadence, measured in loop iterations.
    'imbalance_output_file' : 'imbalance.csv',
    'dump_imbalance_to_disk' : True,
    'publish_imbalance_to_redis' : False,

    # Provider ID is part of mds publish topic.
    'provider_id' : 1,


    # Publish to message bus
    'mds' : {
        'mds_topic' : 'ccxt_rest_ob_$PROVIDER_ID$',
        'redis' : {
            'host' : 'localhost',
            'port' : 6379,
            'db' : 0,
            'ttl_ms' : 1000*60*15 # 15 min?
        }

    },

    # Keep track of latency issues: ts_delta_observation_ms: Keep track of server clock vs timestamp from exchange
    'ts_delta_observation_ms_threshold' : 150
}
105
+
106
# Snapshot frequently-used settings into module-level names.
depth : int = param['depth']
market_type : str = param['market_type']
price_level_increment : float = param['price_level_increment']

# Logging: UTC timestamps on the root logger, console handler only
# (the file handler is intentionally left disabled below).
logging.Formatter.converter = time.gmtime
logger = logging.getLogger()
log_level = logging.INFO # DEBUG --> INFO --> WARNING --> ERROR
logger.setLevel(log_level)
format_str = '%(asctime)s %(message)s'
formatter = logging.Formatter(format_str)
sh = logging.StreamHandler()
sh.setLevel(log_level)
sh.setFormatter(formatter)
logger.addHandler(sh)
# fh = logging.FileHandler(f"{param['job_name']}.log")
# fh.setLevel(log_level)
# fh.setFormatter(formatter)
# logger.addHandler(fh)


# Per-exchange fetch_order_book parameters. Each venue caps the maximum
# depth it returns (see the REST API links in the module docstring), so the
# requested depth is clamped to each venue's limit.
coinbase_param : Dict[str, int] = { }

binance_param : Dict[str, int] = {
    'limit' : depth if depth <=5000 else 5000
}

okx_param : Dict[str, int] = {
    'sz' : depth if depth <= 400 else 400
}


bybit_param : Dict[str, int] = {
    'limit' : depth if depth <= 200 else 200
}


kraken_param : Dict[str, int] = {
    'depth' : depth if depth <= 500 else 500
}


# REST exchange instances (public market-data endpoints; no API keys).
coinbase_exchange = coinbase({
    'defaultType' : market_type
})

binance_exchange = binance({
    'defaultType' : market_type
})

okx_exchange = okx({
    'defaultType' : market_type
})

bybit_exchange = bybit({
    'defaultType' : market_type
})

kraken_exchange = kraken({
    'defaultType' : market_type
})
166
+
167
# Numeric severities mirroring the stdlib logging levels; log() below
# compares on .value to pick a logger method.
class LogLevel(Enum):
    CRITICAL = 50
    ERROR = 40
    WARNING = 30
    INFO = 20
    DEBUG = 10
    NOTSET = 0
174
+
175
+
176
def log(message : str, log_level : LogLevel = LogLevel.INFO):
    '''
    Emit *message*, prefixed with the current wall-clock time, on the module
    logger at the severity implied by *log_level*.

    Anything below WARNING goes to logger.info; WARNING and ERROR map to
    their respective logger methods.
    NOTE(review): CRITICAL (50) matches no branch and is silently dropped —
    behavior preserved as-is from the original.
    '''
    stamped = f"{datetime.now()} {message}"
    severity = log_level.value
    if severity < LogLevel.WARNING.value:
        logger.info(stamped)
    elif severity == LogLevel.WARNING.value:
        logger.warning(stamped)
    elif severity == LogLevel.ERROR.value:
        logger.error(stamped)
185
+
186
async def _fetch_orderbook(symbol : str, exchange : Exchange, fetch_ob_params : Dict):
    '''
    Fetch one orderbook snapshot for *symbol* from *exchange*.

    Returns a dict with keys: source, orderbook, mid, min_bid_price,
    max_bid_price, min_ask_price, max_ask_price, is_valid,
    ts_delta_observation_ms. On any failure returns only
    {'source', 'is_valid': False} — callers must filter on 'is_valid'
    before touching the price keys.

    A snapshot is marked invalid when the exchange-reported timestamp lags
    our clock by more than param['ts_delta_observation_ms_threshold'] ms.
    '''
    try:
        ob = exchange.fetch_order_book(symbol=symbol, params=fetch_ob_params)

        # Staleness check — only possible when the venue supplies a timestamp.
        is_valid = True
        ts_delta_observation_ms : int = 0
        if 'timestamp' in ob and ob['timestamp']:
            update_ts_ms = ob['timestamp']
            ts_delta_observation_ms = int(datetime.now().timestamp()*1000) - update_ts_ms
            is_valid = ts_delta_observation_ms <= param['ts_delta_observation_ms_threshold']

        bid_prices = [ x[0] for x in ob['bids'] ]
        ask_prices = [ x[0] for x in ob['asks'] ]
        min_bid_price = min(bid_prices)
        max_bid_price = max(bid_prices)
        min_ask_price = min(ask_prices)
        max_ask_price = max(ask_prices)

        # Mid from best bid / best ask. Fix: reuse the extremes computed
        # above instead of re-scanning both sides of the book a second time.
        mid = (max_bid_price + min_ask_price) / 2

        log(f"{exchange.name} mid: {mid}, min_bid_price: {min_bid_price}, max_bid_price: {max_bid_price}, min_ask_price: {min_ask_price}, max_ask_price: {max_ask_price}, range: {int(max_ask_price-min_bid_price)}, ts_delta_observation_ms: {ts_delta_observation_ms}")

        return {
            'source' : exchange.name,
            'orderbook' : ob,
            'mid' : mid,
            'min_bid_price' : min_bid_price,
            'max_bid_price' : max_bid_price,
            'min_ask_price' : min_ask_price,
            'max_ask_price' : max_ask_price,
            'is_valid' : is_valid,
            'ts_delta_observation_ms' : ts_delta_observation_ms
        }
    except Exception as fetch_err:
        # Consistency fix: route errors through log() (module logger, with a
        # severity) instead of a bare print() to stdout.
        log(f"_fetch_orderbook failed for {exchange.name}: {fetch_err}", LogLevel.ERROR)
        return {
            'source' : exchange.name,
            'is_valid' : False
        }
224
+
225
+
226
async def main():
    '''
    Orchestrate the provider: parse CLI args into the module-level `param`
    dict, connect to redis, then loop forever fetching orderbooks from the
    enabled exchanges, aggregating them into fixed price buckets and
    tracking bid/ask USDT imbalance (raw + EMA). Never returns.
    '''
    def parse_args():
        # Overwrites entries of the module-level `param` dict in place.
        parser = argparse.ArgumentParser() # type: ignore

        parser.add_argument("--normalized_symbol", help="Example BTC/USDT for spot. BTC/USDT:USDT for perps.",default="BTC/USDT")
        parser.add_argument("--sliding_window_num_intervals", help="Sliding window is used for EMA's calculation. It's measured in # intervals (or # loops)",default=1200)
        parser.add_argument("--update_imabalce_csv_intervals", help="We'd update, or publish, imbalance data only at multiples of update_imabalce_csv_intervals. Again, it's measured in # intervals.",default=100)

        parser.add_argument("--topic_imbalance_data", help="Publish topic for imbalance data. Since redis has special treatment for ':' (it'd consider it a folder), we'd replace ':' with '|'. Example BTC/USDT:USDT will become BTC/USDT|USDT.",default=None)
        parser.add_argument("--redis_ttl_ms", help="TTL for items published to redis. Default: 1000*60*60 (i.e. 1hr)",default=1000*60*60)

        parser.add_argument("--dump_imbalance_to_disk", help="Y or N (default).", default='Y')
        parser.add_argument("--publish_imbalance_to_redis", help="Y or N (default).", default='N')

        args = parser.parse_args()
        param['normalized_symbol'] = args.normalized_symbol
        param['sliding_window_num_intervals'] = int(args.sliding_window_num_intervals)
        param['update_imabalce_csv_intervals'] = int(args.update_imabalce_csv_intervals)
        param['mds']['mds_topic'] = args.topic_imbalance_data if args.topic_imbalance_data else f"imbalance_data_{param['normalized_symbol'].replace(':','|')}"
        param['redis_ttl_ms'] = int(args.redis_ttl_ms)

        # 'Y' enables; any other value (or missing) disables.
        if args.dump_imbalance_to_disk:
            if args.dump_imbalance_to_disk=='Y':
                param['dump_imbalance_to_disk'] = True
            else:
                param['dump_imbalance_to_disk'] = False
        else:
            param['dump_imbalance_to_disk'] = False

        if args.publish_imbalance_to_redis:
            if args.publish_imbalance_to_redis=='Y':
                param['publish_imbalance_to_redis'] = True
            else:
                param['publish_imbalance_to_redis'] = False
        else:
            param['publish_imbalance_to_redis'] = False

    def init_redis_client():
        # Connect and run a cheap command so a dead redis fails fast at startup.
        redis_client = StrictRedis(
            host = param['mds']['redis']['host'],
            port = param['mds']['redis']['port'],
            db = 0,
            ssl = False
        )
        try:
            redis_client.keys()
        # NOTE(review): redis-py raises redis.exceptions.ConnectionError;
        # whether the builtin ConnectionError caught here matches it depends
        # on the redis-py version — verify.
        except ConnectionError as redis_conn_error:
            err_msg = f"Failed to connect to redis: {param['mds']['redis']['host']}, port: {param['mds']['redis']['port']}"
            raise ConnectionError(err_msg)

        return redis_client

    parse_args()
    redis_client = init_redis_client()

    normalized_symbol : str = param['normalized_symbol']
    param['job_name'] = f'ccxt_agg_ob_provider_{normalized_symbol.replace(":","_").replace("/","-")}'
    sliding_window_num_intervals : int = param['sliding_window_num_intervals']
    update_imabalce_csv_intervals : int = param['update_imabalce_csv_intervals']

    # One row is appended per loop iteration; the three ema_* columns are
    # recomputed in batches inside the finally block below.
    pd_imbalances = pd.DataFrame(columns=['timestamp_ms', 'datetime', 'mid', 'imbalance', 'bids_amount_usdt', 'asks_amount_usdt', 'total_amount', 'pct_imbalance', 'ema_pct_imbalance', 'ema_bids_amount_usdt', 'ema_asks_amount_usdt'])
    mid : Union[float, None] = None
    imbalance : Union[float, None] = None
    pct_imbalance : Union[float, None] = None
    sum_bids_amount_usdt : Union[float, None] = None
    sum_asks_amount_usdt : Union[float, None] = None
    total_amount : Union[float, None] = None
    last_ema_pct_imbalance : Union[float, None] = None
    last_ema_bids_amount_usdt : Union[float, None] = None
    last_ema_asks_amount_usdt : Union[float, None] = None

    i = 0
    total_loop_elapsed_ms : int = 0
    while True:
        loop_start = time.time()

        try:
            # Comment out Coinbase and Kraken spot only exchanges.
            orderbooks = await asyncio.gather(
                # _fetch_orderbook(symbol=normalized_symbol, exchange=coinbase_exchange, fetch_ob_params=coinbase_param),
                _fetch_orderbook(symbol=normalized_symbol, exchange=binance_exchange, fetch_ob_params=binance_param),
                _fetch_orderbook(symbol=normalized_symbol, exchange=bybit_exchange, fetch_ob_params=bybit_param),
                # _fetch_orderbook(symbol=normalized_symbol, exchange=kraken_exchange, fetch_ob_params=kraken_param),
                _fetch_orderbook(symbol=normalized_symbol, exchange=okx_exchange, fetch_ob_params=okx_param)
            )
            valid_orderbooks = [ ob for ob in orderbooks if ob['is_valid'] ]
            invalid_orderbooks = [ ob for ob in orderbooks if not ob['is_valid'] ]
            invalid_orderbooks_names = " ".join([ ob['source'] for ob in invalid_orderbooks ] )

            # Price range every valid venue covers: [max_min_bid_price,
            # min_max_ask_price]. Bests across venues feed the spread calc.
            max_min_bid_price = max([ ob['min_bid_price'] for ob in valid_orderbooks if ob])
            best_bid_price = max([ob['max_bid_price'] for ob in valid_orderbooks if ob])
            min_max_ask_price = min([ob['max_ask_price'] for ob in valid_orderbooks if ob])
            best_ask_price = min([ob['min_ask_price'] for ob in valid_orderbooks if ob])

            elapsed_ms = (time.time() - loop_start) * 1000
            logger.info(f"orderbooks fetch elapsed (ms): {elapsed_ms}, # orderbooks: {len(valid_orderbooks)}, max_min_bid_price: {max_min_bid_price}, min_max_ask_price: {min_max_ask_price}, best_bid_price: {best_bid_price}, best_ask_price: {best_ask_price}. Invalid books: {invalid_orderbooks_names}")

            aggregated_orderbooks = {
                'bids' : {},
                'asks' : {}
            }

            def round_to_nearest(price, increment):
                # Bucket a raw price onto the configured price grid.
                return round(price / increment) * increment

            # NOTE(review): raises IndexError if the Binance book is invalid
            # this round — swallowed by the outer except, skipping the round.
            mid = [ x['mid'] for x in valid_orderbooks if x['source']=='Binance'][0] # use Binance as mid reference
            for orderbook in valid_orderbooks:
                bids = orderbook['orderbook']['bids']
                asks = orderbook['orderbook']['asks']

                # Sum sizes per price bucket, keeping only levels inside the
                # common range so shallower books don't skew the aggregate.
                for bid in bids:
                    price = round_to_nearest(bid[0], price_level_increment)
                    amount = bid[1]
                    if bid[0] > max_min_bid_price:
                        existing_amount = 0
                        if price in aggregated_orderbooks['bids']:
                            existing_amount = aggregated_orderbooks['bids'][price]['amount']
                        amount_in_base_ccy = existing_amount + amount
                        amount_in_usdt = amount_in_base_ccy * mid
                        aggregated_orderbooks['bids'][price] = {
                            'price' : price,
                            'amount' : amount_in_base_ccy,
                            'amount_usdt' : amount_in_usdt
                        }

                for ask in asks:
                    price = round_to_nearest(ask[0], price_level_increment)
                    amount = ask[1]
                    if ask[0] < min_max_ask_price:
                        existing_amount = 0
                        if price in aggregated_orderbooks['asks']:
                            existing_amount = aggregated_orderbooks['asks'][price]['amount']
                        amount_in_base_ccy = existing_amount + amount
                        amount_in_usdt = amount_in_base_ccy * mid
                        aggregated_orderbooks['asks'][price] = {
                            'price' : price,
                            'amount' : amount_in_base_ccy,
                            'amount_usdt' : amount_in_usdt
                        }

            # Highest price first, for ladder-style display.
            sorted_asks = dict(sorted(aggregated_orderbooks['asks'].items(), key=lambda item: item[0], reverse=True))
            sorted_bids = dict(sorted(aggregated_orderbooks['bids'].items(), key=lambda item: item[0], reverse=True))

            pd_aggregated_orderbooks_asks = pd.DataFrame(sorted_asks)
            pd_aggregated_orderbooks_bids = pd.DataFrame(sorted_bids)

            pd_aggregated_orderbooks_asks = pd_aggregated_orderbooks_asks.transpose()
            pd_aggregated_orderbooks_bids = pd_aggregated_orderbooks_bids.transpose()

            sum_asks_amount_usdt = pd.to_numeric(pd_aggregated_orderbooks_asks['amount_usdt']).sum()
            sum_bids_amount_usdt = pd.to_numeric(pd_aggregated_orderbooks_bids['amount_usdt']).sum()

            pd_aggregated_orderbooks_asks['str_amount_usdt'] = pd_aggregated_orderbooks_asks['amount_usdt'].apply(lambda x: f'{x:,.2f}')
            pd_aggregated_orderbooks_bids['str_amount_usdt'] = pd_aggregated_orderbooks_bids['amount_usdt'].apply(lambda x: f'{x:,.2f}')

            # Price bucket holding the most resting USDT per side — used as a
            # crude resistance (ask side) / support (bid side) marker.
            ask_resistance_price_level = pd_aggregated_orderbooks_asks['amount_usdt'].idxmax()
            bid_support_price_level = pd_aggregated_orderbooks_bids['amount_usdt'].idxmax()

            pd_aggregated_orderbooks_asks['is_max_amount_usdt'] = pd_aggregated_orderbooks_asks.index == ask_resistance_price_level
            pd_aggregated_orderbooks_bids['is_max_amount_usdt'] = pd_aggregated_orderbooks_bids.index == bid_support_price_level

            # NOTE(review): in-place rename on a column-sliced frame may emit
            # pandas' SettingWithCopyWarning — confirm intended.
            pd_aggregated_orderbooks_asks_ = pd_aggregated_orderbooks_asks[['price', 'amount', 'str_amount_usdt', 'is_max_amount_usdt']]
            pd_aggregated_orderbooks_asks_.rename(columns={'str_amount_usdt': 'amount_usdt'}, inplace=True)
            pd_aggregated_orderbooks_bids_ = pd_aggregated_orderbooks_bids[['price', 'amount', 'str_amount_usdt', 'is_max_amount_usdt']]
            pd_aggregated_orderbooks_bids_.rename(columns={'str_amount_usdt': 'amount_usdt'}, inplace=True)

            spread_bps = (best_ask_price-best_bid_price) / mid * 10000
            spread_bps = round(spread_bps, 0)
            # NOTE(review): these `and` tests treat a legitimate 0.0 sum as
            # missing (falsy); `is not None` would be stricter — confirm intent.
            imbalance = sum_bids_amount_usdt - sum_asks_amount_usdt if sum_bids_amount_usdt and sum_asks_amount_usdt else None
            total_amount = sum_bids_amount_usdt + sum_asks_amount_usdt if sum_bids_amount_usdt and sum_asks_amount_usdt else None
            pct_imbalance = (imbalance/total_amount) * 100 if imbalance and total_amount else None

            log(f"mid: {mid}, imbalance (bids - asks): {imbalance:,.0f}, pct_imbalance: {pct_imbalance:,.2f}, last_ema_pct_imbalance: {last_ema_pct_imbalance if last_ema_pct_imbalance else '--'}, spread_bps between bests: {spread_bps} (If < 0, arb opportunity). Range {max_min_bid_price} - {min_max_ask_price} (${int(min_max_ask_price-max_min_bid_price)})")

            if last_ema_bids_amount_usdt and last_ema_asks_amount_usdt:
                log(f"ema_bids_amount_usdt: {last_ema_bids_amount_usdt:,.0f}, ema_asks_amount_usdt: {last_ema_asks_amount_usdt:,.0f}")

            log(f"asks USD {sum_asks_amount_usdt:,.0f}, best: {best_ask_price:,.2f}")
            log(f"{tabulate(pd_aggregated_orderbooks_asks_.reset_index(drop=True), headers='keys', tablefmt='psql', colalign=('right', 'right', 'right'), showindex=False)}") # type: ignore Otherwise error: tabulate Argument of type "DataFrame" cannot be assigned to parameter "tabular_data" of type "Mapping[str, Iterable[Any]] | Iterable[Iterable[Any]]" in function "tabulate"

            log(f"bids USD {sum_bids_amount_usdt:,.0f}, best: {best_bid_price:,.2f}")
            log(f"{tabulate(pd_aggregated_orderbooks_bids_.reset_index(drop=True), headers='keys', tablefmt='psql', colalign=('right', 'right', 'right'), showindex=False)}") # type: ignore Otherwise error: tabulate Argument of type "DataFrame" cannot be assigned to parameter "tabular_data" of type "Mapping[str, Iterable[Any]] | Iterable[Iterable[Any]]" in function "tabulate"

        except Exception as loop_err:
            # NOTE(review): logged at default INFO level — consider LogLevel.ERROR.
            log(f"#{i} Error: {loop_err}")

        finally:
            # Bookkeeping runs even when the fetch/aggregate step failed, so
            # pd_imbalances keeps one row per iteration (carrying the previous
            # round's values on error rounds).
            this_loop_elapsed_ms : int = int((time.time()-loop_start)*1000)
            total_loop_elapsed_ms += this_loop_elapsed_ms
            avg_loop_elapsed_ms : int = int(total_loop_elapsed_ms / (i+1))
            sliding_window_num_sec : int = int(sliding_window_num_intervals*avg_loop_elapsed_ms/1000)
            log(f"#{i} this_loop_elapsed_ms: {this_loop_elapsed_ms}, avg_loop_elapsed_ms: {avg_loop_elapsed_ms}, sliding_window_num_intervals: {sliding_window_num_intervals}, sliding_window_num_sec: {sliding_window_num_sec}")

            pct_imbalance = imbalance/total_amount * 100 if imbalance and total_amount else None
            pd_imbalances.loc[i] = [ int(loop_start*1000), datetime.fromtimestamp(loop_start), mid, imbalance, sum_bids_amount_usdt, sum_asks_amount_usdt, total_amount, pct_imbalance, np.nan, np.nan, np.nan ]

            # Publish/dump only every update_imabalce_csv_intervals iterations.
            if i%update_imabalce_csv_intervals==0:
                # Recompute the EMA columns over the whole history once
                # enough rows exist to fill the sliding window.
                if pd_imbalances.shape[0]>sliding_window_num_intervals:
                    pd_imbalances['ema_pct_imbalance'] = pd_imbalances['pct_imbalance'].ewm(span=sliding_window_num_intervals, adjust=False).mean()
                    pd_imbalances['ema_bids_amount_usdt'] = pd_imbalances['bids_amount_usdt'].ewm(span=sliding_window_num_intervals, adjust=False).mean()
                    pd_imbalances['ema_asks_amount_usdt'] = pd_imbalances['asks_amount_usdt'].ewm(span=sliding_window_num_intervals, adjust=False).mean()
                    last_ema_pct_imbalance = pd_imbalances['ema_pct_imbalance'].iloc[-1]
                    last_ema_bids_amount_usdt = pd_imbalances['ema_bids_amount_usdt'].iloc[-1]
                    last_ema_asks_amount_usdt = pd_imbalances['ema_asks_amount_usdt'].iloc[-1]

                data : Dict[str, Union[str, float, int, None]] = {
                    'normalized_symbol' : normalized_symbol,
                    'timestamp_ms' : int(datetime.now().timestamp() * 1000),

                    'ema_pct_imbalance' : last_ema_pct_imbalance,
                    'ema_bids_amount_usdt' : last_ema_bids_amount_usdt,
                    'ema_asks_amount_usdt' : last_ema_asks_amount_usdt,

                    'avg_loop_elapsed_ms' : avg_loop_elapsed_ms,
                    'sliding_window_num_intervals' : sliding_window_num_intervals,
                    'sliding_window_num_sec' : sliding_window_num_sec
                }
                # NOTE(review): this publish runs unconditionally — the
                # 'publish_imbalance_to_redis' flag parsed above is never
                # consulted; confirm whether it should gate this call.
                redis_client.set(name=param['mds']['mds_topic'], value=json.dumps(data), ex=int(param['mds']['redis']['ttl_ms']/1000))

                if param['dump_imbalance_to_disk']:
                    pd_imbalances.to_csv(param['imbalance_output_file'])

            i += 1
448
+ i += 1
449
+
450
+
451
+ asyncio.run(main())