siglab-py 0.5.66__py3-none-any.whl → 0.6.18__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Note: this release of siglab-py has been flagged as potentially problematic.

@@ -9,9 +9,9 @@ import math
 import pandas as pd
 import numpy as np
 import asyncio
+from tabulate import tabulate
 
 from ccxt.base.exchange import Exchange as CcxtExchange
-from ccxt import deribit
 import ccxt
 import ccxt.pro as ccxtpro
 
@@ -24,6 +24,8 @@ import yfinance as yf
 from siglab_py.util.retry_util import retry
 from siglab_py.exchanges.futubull import Futubull
 from siglab_py.exchanges.any_exchange import AnyExchange
+from siglab_py.exchanges.deribit import Deribit, DeribitAsync
+from siglab_py.exchanges.binance import Binance, BinanceAsync
 
 def instantiate_exchange(
     exchange_name : str,
@@ -31,6 +33,7 @@ def instantiate_exchange(
     secret : Union[str, None] = None,
     passphrase : Union[str, None] = None,
     default_type : Union[str, None] = 'spot',
+    default_sub_type : Union[str, None] = None,
     rate_limit_ms : float = 100
 ) -> Union[AnyExchange, None]:
     exchange_name = exchange_name.lower().strip()
@@ -46,6 +49,10 @@
             'defaultType' : default_type
         }
     }
+
+    if default_sub_type:
+        exchange_params['defaultSubType'] = default_sub_type
+
     if api_key:
         exchange_params['apiKey'] = api_key
     if secret:
@@ -54,13 +61,13 @@
         exchange_params['passphrase'] = passphrase
 
     if exchange_name=='binance':
-        exchange = ccxt.binance(exchange_params) # type: ignore
+        exchange = Binance(exchange_params) # type: ignore
     elif exchange_name=='bybit':
         exchange = ccxt.bybit(exchange_params) # type: ignore
     elif exchange_name=='okx':
         exchange = ccxt.okx(exchange_params) # type: ignore
     elif exchange_name=='deribit':
-        exchange = ccxt.deribit(exchange_params) # type: ignore
+        exchange = Deribit(exchange_params) # type: ignore
    elif exchange_name=='hyperliquid':
         exchange = ccxt.hyperliquid(
             {
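
The hunks above replace ccxt.binance / ccxt.deribit with the package's own Binance and Deribit wrappers and add a default_sub_type pass-through that is only written to exchange_params when provided. A minimal, hedged usage sketch (not part of the release; it assumes instantiate_exchange is importable from siglab_py.util.market_data_util, as the RECORD below suggests, and that 'swap'/'linear' are values your target exchange accepts):

    # Sketch only: parameter values are illustrative assumptions, not package defaults.
    from siglab_py.util.market_data_util import instantiate_exchange

    exchange = instantiate_exchange(
        exchange_name='binance',    # now returns the siglab_py Binance wrapper
        default_type='swap',        # forwarded as options['defaultType']
        default_sub_type='linear',  # new in 0.6.x: set on exchange_params only when provided
        rate_limit_ms=100
    )
    if exchange is not None:
        exchange.load_markets()     # assumed to support the usual ccxt calls
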
@@ -82,8 +89,10 @@ async def async_instantiate_exchange(
     api_key : str,
     secret : str,
     passphrase : str,
-    default_type : str,
-    rate_limit_ms : float = 100
+    default_type : Union[str, None] = 'spot',
+    default_sub_type : Union[str, None] = None,
+    rate_limit_ms : float = 100,
+    verbose : bool = False
 ) -> Union[AnyExchange, None]:
     exchange : Union[AnyExchange, None] = None
     exchange_name : str = gateway_id.split('_')[0]
@@ -98,13 +107,17 @@
         'rateLimit' : rate_limit_ms,
         'options' : {
             'defaultType' : default_type
-        }
+        },
+        'verbose': verbose
     }
+
+    if default_sub_type:
+        exchange_params['defaultSubType'] = default_sub_type
 
     if exchange_name=='binance':
         # spot, future, margin, delivery, option
         # https://github.com/ccxt/ccxt/blob/master/python/ccxt/binance.py#L1298
-        exchange = ccxtpro.binance(exchange_params) # type: ignore
+        exchange = BinanceAsync(exchange_params) # type: ignore
     elif exchange_name=='bybit':
         # spot, linear, inverse, futures
         # https://github.com/ccxt/ccxt/blob/master/python/ccxt/bybit.py#L1041
@@ -117,7 +130,7 @@
     elif exchange_name=='deribit':
         # spot, swap, future
         # https://github.com/ccxt/ccxt/blob/master/python/ccxt/deribit.py#L360
-        exchange = ccxtpro.deribit(exchange_params) # type: ignore
+        exchange = DeribitAsync(exchange_params) # type: ignore
     elif exchange_name=='kraken':
         exchange = ccxtpro.kraken(exchange_params) # type: ignore
     elif exchange_name=='hyperliquid':
@@ -156,9 +169,10 @@
                 "walletAddress" : api_key,
                 "privateKey" : secret,
                 'enableRateLimit' : True,
-                'rateLimit' : rate_limit_ms
-            }
-        ) # type: ignore
+                'rateLimit' : rate_limit_ms,
+                'verbose': verbose
+            } # type: ignore
+        )
     else:
         raise ValueError(f"Unsupported exchange {exchange_name}, check gateway_id {gateway_id}.")
 
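
async_instantiate_exchange now mirrors the sync signature (optional default_type / default_sub_type) and adds a verbose flag copied into exchange_params. A hedged sketch (not from the package; gateway_id and credentials are placeholders, and the exchange name is taken from the prefix before '_', per the diff):

    # Sketch only: placeholder credentials; do not treat these values as package defaults.
    import asyncio
    from siglab_py.util.market_data_util import async_instantiate_exchange

    async def main():
        exchange = await async_instantiate_exchange(
            gateway_id='deribit_gateway01',   # hypothetical id; 'deribit' is parsed from the prefix
            api_key='<api_key>',
            secret='<secret>',
            passphrase='<passphrase>',
            default_type='swap',
            default_sub_type=None,            # only copied into exchange_params when set
            rate_limit_ms=100,
            verbose=False                     # new: forwarded as exchange_params['verbose']
        )
        return exchange

    asyncio.run(main())
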
@@ -192,18 +206,18 @@ def timestamp_to_datetime_cols(pd_candles : pd.DataFrame):
     pd_candles['timestamp_ms'] = pd_candles['timestamp_ms'].apply(_fix_timestamp_ms)
     pd_candles['datetime'] = pd_candles['timestamp_ms'].apply(lambda x: datetime.fromtimestamp(int(x/1000)))
     pd_candles['datetime'] = pd.to_datetime(pd_candles['datetime'])
-    pd_candles['datetime'] = pd_candles['datetime'].dt.tz_localize(None)
+    pd_candles['datetime'] = pd_candles['datetime'].dt.tz_localize(None) # type: ignore
     pd_candles['datetime_utc'] = pd_candles['timestamp_ms'].apply(
         lambda x: datetime.fromtimestamp(int(x.timestamp()) if isinstance(x, pd.Timestamp) else int(x / 1000), tz=timezone.utc)
     )
 
     # This is to make it easy to do grouping with Excel pivot table
-    pd_candles['year'] = pd_candles['datetime'].dt.year
-    pd_candles['month'] = pd_candles['datetime'].dt.month
-    pd_candles['day'] = pd_candles['datetime'].dt.day
-    pd_candles['hour'] = pd_candles['datetime'].dt.hour
-    pd_candles['minute'] = pd_candles['datetime'].dt.minute
-    pd_candles['dayofweek'] = pd_candles['datetime'].dt.dayofweek # dayofweek: Monday is 0 and Sunday is 6
+    pd_candles['year'] = pd_candles['datetime'].dt.year # type: ignore
+    pd_candles['month'] = pd_candles['datetime'].dt.month # type: ignore
+    pd_candles['day'] = pd_candles['datetime'].dt.day # type: ignore
+    pd_candles['hour'] = pd_candles['datetime'].dt.hour # type: ignore
+    pd_candles['minute'] = pd_candles['datetime'].dt.minute # type: ignore
+    pd_candles['dayofweek'] = pd_candles['datetime'].dt.dayofweek # type: ignore dayofweek: Monday is 0 and Sunday is 6
 
     pd_candles['week_of_month'] = pd_candles['timestamp_ms'].apply(
         lambda x: timestamp_to_week_of_month(int(x/1000))
@@ -220,42 +234,58 @@
     )
 
     pd_candles['timestamp_ms_gap'] = pd_candles['timestamp_ms'] - pd_candles['timestamp_ms'].shift(1)
-    timestamp_ms_gap = pd_candles.iloc[-1]['timestamp_ms_gap']
-    assert(pd_candles[~pd_candles.timestamp_ms_gap.isna()][pd_candles.timestamp_ms_gap!=timestamp_ms_gap].shape[0]==0)
+
+    # Depending on asset, minutes bar may have gaps
+    timestamp_ms_gap_median = pd_candles['timestamp_ms_gap'].median()
+    NUM_MS_IN_1HR = 60*60*1000
+    if timestamp_ms_gap_median>=NUM_MS_IN_1HR:
+        num_rows_with_expected_gap = pd_candles[~pd_candles.timestamp_ms_gap.isna()][pd_candles.timestamp_ms_gap==timestamp_ms_gap_median].shape[0]
+        assert(num_rows_with_expected_gap/pd_candles.shape[0]>0.9)
     pd_candles.drop(columns=['timestamp_ms_gap'], inplace=True)
 
+'''
+APAC (Asia-Pacific) Trading Hours
+UTC 21:00 - 09:00 (approximate range)
+Major financial centers: Tokyo, Hong Kong, Singapore, Sydney
+
+EMEA (Europe, Middle East, Africa) Trading Hours
+UTC 07:00 - 16:00 (approximate range)
+Major financial centers: London, Frankfurt, Paris, Zurich, Dubai
+
+US Trading Hours
+UTC 13:00 - 22:00 (approximate range)
+Major financial centers: New York, Chicago
+Key markets: NYSE, NASDAQ
+
+utcnow and utcfromtimestamp been deprecated in Python 3.12
+https://www.pythonmorsels.com/converting-to-utc-time/
+
+Example, UTC 23:00 is 3rd hour in APAC trading session
+    utc_hour = 23
+    i = get_regions_trading_utc_hours()['APAC'].index(utc_hour)
+    assert(i==2)
+'''
+def get_regions_trading_utc_hours():
+    return {
+        'APAC' : [21,22,23,0,1,2,3,4,5,6,7,8,9],
+        'EMEA' : [7,8,9,10,11,12,13,14,15,16],
+        'AMER' : [13,14,15,16,17,18,19,20,21,22]
+    }
+
 def timestamp_to_active_trading_regions(
     timestamp_ms : int
 ) -> List[str]:
-
-    '''
-    APAC (Asia-Pacific) Trading Hours
-    UTC 22:00 - 09:00 (approximate range)
-    Major financial centers: Tokyo, Hong Kong, Singapore, Sydney
-
-    EMEA (Europe, Middle East, Africa) Trading Hours
-    UTC 07:00 - 16:00 (approximate range)
-    Major financial centers: London, Frankfurt, Paris, Zurich, Dubai
-
-    US Trading Hours
-    UTC 13:30 - 20:00 (approximate range)
-    Major financial centers: New York, Chicago
-    Key markets: NYSE, NASDAQ
-
-    utcnow and utcfromtimestamp been deprecated in Python 3.12
-    https://www.pythonmorsels.com/converting-to-utc-time/
-    '''
     active_trading_regions : List[str] = []
 
     dt_utc = datetime.fromtimestamp(int(timestamp_ms / 1000), tz=timezone.utc)
     utc_hour = dt_utc.hour
-    if (utc_hour >= 22) or (utc_hour <= 9):
+    if utc_hour in get_regions_trading_utc_hours()['APAC']:
         active_trading_regions.append("APAC")
 
-    if 7 <= utc_hour <= 16:
+    if utc_hour in get_regions_trading_utc_hours()['EMEA']:
         active_trading_regions.append("EMEA")
 
-    if 13 <= utc_hour <= 20:
+    if utc_hour in get_regions_trading_utc_hours()['AMER']:
         active_trading_regions.append("AMER")
 
     return active_trading_regions
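
A hedged sketch (not part of the release) of how the region-to-UTC-hour tables above drive the rewritten timestamp_to_active_trading_regions: 14:00 UTC sits inside both the EMEA and AMER hour lists, so both regions come back.

    # Sketch only: the date is arbitrary; only the UTC hour matters for the lookup.
    from datetime import datetime, timezone
    from siglab_py.util.market_data_util import timestamp_to_active_trading_regions

    ts_ms = int(datetime(2024, 1, 2, 14, 0, tzinfo=timezone.utc).timestamp() * 1000)
    print(timestamp_to_active_trading_regions(ts_ms))   # expected: ['EMEA', 'AMER']
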
@@ -288,6 +318,17 @@ def fix_column_types(pd_candles : pd.DataFrame):
     pd_candles.reset_index(drop=True, inplace=True)
     pd_candles.sort_values("datetime", inplace=True)
 
+def interval_to_ms(interval : str) -> int:
+    interval_ms : int = 0
+    if interval=="d":
+        interval_ms = 24*60*60*1000
+    elif interval=="h":
+        interval_ms = 60*60*1000
+    elif interval=="m":
+        interval_ms = 60*1000
+
+    return interval_ms
+
 '''
 https://polygon.io/docs/stocks
 '''
@@ -343,7 +384,7 @@ class NASDAQExchange:
         pd_daily_candles['low'] = pd_daily_candles['low'].astype(str).str.replace('$','')
         pd_daily_candles['close'] = pd_daily_candles['close'].astype(str).str.replace('$','')
         pd_daily_candles['datetime']= pd.to_datetime(pd_daily_candles['datetime'])
-        pd_daily_candles['timestamp_ms'] = pd_daily_candles.datetime.values.astype(np.int64) // 10 ** 6
+        pd_daily_candles['timestamp_ms'] = pd_daily_candles.datetime.values.astype(np.int64) // 10 ** 6 # type: ignore
         pd_daily_candles['symbol'] = symbol
         pd_daily_candles['exchange'] = 'nasdaq'
         fix_column_types(pd_daily_candles)
@@ -362,7 +403,7 @@ class NASDAQExchange:
         )
 
         # When you fill foward, a few candles before start date can have null values (open, high, low, close, volume ...)
-        first_candle_dt = pd_hourly_candles[(~pd_hourly_candles.close.isna()) & (pd_hourly_candles['datetime'].dt.time == pd.Timestamp('00:00:00').time())].iloc[0]['datetime']
+        first_candle_dt = pd_hourly_candles[(~pd_hourly_candles.close.isna()) & (pd_hourly_candles['datetime'].dt.time == pd.Timestamp('00:00:00').time())].iloc[0]['datetime'] # type: ignore
         pd_hourly_candles = pd_hourly_candles[pd_hourly_candles.datetime>=first_candle_dt]
         exchange_candles[symbol] = pd_hourly_candles
 
@@ -426,6 +467,45 @@ class YahooExchange:
 
         return exchange_candles
 
+def aggregate_candles(
+    interval : str,
+    pd_candles : pd.DataFrame
+) -> pd.DataFrame:
+    if interval[-1]=='m':
+        # 'm' for pandas means months!
+        interval = interval.replace('m','min')
+    pd_candles.set_index('datetime', inplace=True)
+    pd_candles_aggregated = pd_candles.resample(interval).agg({
+        'exchange' : 'first',
+        'symbol' : 'first',
+        'timestamp_ms' : 'first',
+
+        'open': 'first',
+        'high': 'max',
+        'low': 'min',
+        'close': 'last',
+        'volume': 'sum',
+
+        'datetime_utc' : 'first',
+        'year' : 'first',
+        'month' : 'first',
+        'day' : 'first',
+        'hour' : 'first',
+        'minute' : 'first',
+        'dayofweek' : 'first',
+        'week_of_month' : 'first',
+
+        'apac_trading_hr' : 'first',
+        'emea_trading_hr' : 'first',
+        'amer_trading_hr' : 'first',
+
+        'pct_chg_on_close' : 'sum',
+
+    })
+    pd_candles.reset_index(inplace=True)
+    pd_candles_aggregated.reset_index(inplace=True)
+    return pd_candles_aggregated
+
 def fetch_historical_price(
     exchange,
     normalized_symbol : str,
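
The new aggregate_candles rewrites a trailing 'm' to pandas' 'min' (so '15m' is not parsed as 15 months) and resamples with open/first, high/max, low/min, close/last, volume/sum, summing pct_chg_on_close. A hedged sketch (not from the package; it assumes aggregate_candles is a module-level function, as the call inside fetch_candles suggests, and pd_1m_candles is a placeholder 1-minute frame already carrying the columns listed in the agg dict):

    # Sketch only: pd_1m_candles is an assumed DataFrame produced by fetch_candles/fix_column_types.
    from siglab_py.util.market_data_util import aggregate_candles

    pd_15m_candles = aggregate_candles(interval='15m', pd_candles=pd_1m_candles)
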
@@ -472,19 +552,21 @@ def fetch_candles(
     validation_max_gaps : int = 10,
     validation_max_end_date_intervals : int = 1
 ) -> Dict[str, Union[pd.DataFrame, None]]:
-
+    exchange_candles = { '' : None }
+    num_intervals = int(candle_size.replace(candle_size[-1],''))
+
     if end_ts>datetime.now().timestamp():
         end_ts = int(datetime.now().timestamp())
 
     if type(exchange) is YahooExchange:
-        return exchange.fetch_candles(
+        exchange_candles = exchange.fetch_candles(
            start_ts=start_ts,
            end_ts=end_ts,
            symbols=normalized_symbols,
            candle_size=candle_size
        )
     elif type(exchange) is NASDAQExchange:
-        return exchange.fetch_candles(
+        exchange_candles = exchange.fetch_candles(
            start_ts=start_ts,
            end_ts=end_ts,
            symbols=normalized_symbols,
@@ -501,17 +583,51 @@
             pd_candles = exchange_candles[symbol]
             if not pd_candles is None:
                 fix_column_types(pd_candles) # You don't want to do this from Futubull as you'd need import Futubull from there: Circular references
-        return exchange_candles
+
     elif issubclass(exchange.__class__, CcxtExchange):
-        return _fetch_candles_ccxt(
+        exchange_candles = _fetch_candles_ccxt(
             start_ts=start_ts,
             end_ts=end_ts,
             exchange=exchange,
             normalized_symbols=normalized_symbols,
             candle_size=candle_size,
-            num_candles_limit=num_candles_limit
+            num_candles_limit=num_candles_limit,
+            logger=logger
         )
-    return { '' : None }
+        if num_intervals!=1:
+            for symbol in exchange_candles:
+                if not exchange_candles[symbol] is None:
+                    exchange_candles[symbol] = aggregate_candles(candle_size, exchange_candles[symbol]) # type: ignore
+
+    # For invalid rows missing timestamps, o/h/l/c/v, fill forward close, set volume to zero.
+    for symbol in exchange_candles:
+        pd_candles = exchange_candles[symbol]
+
+        if pd_candles is not None:
+            mask_invalid_candles = pd_candles["timestamp_ms"].isna()
+            if mask_invalid_candles.any():
+                pd_invalid_candles = pd_candles[mask_invalid_candles]
+
+                if logger is not None:
+                    logger.warning(f"Dropping {pd_invalid_candles.shape[0]}/{pd_candles.shape[0]} rows from {symbol} candles (null timestamp_ms)") # type: ignore
+                    logger.warning(f"{tabulate(pd_invalid_candles, headers='keys', tablefmt='psql')}") # type: ignore
+
+                def _to_timestamp_ms(dt):
+                    if pd.isna(dt):
+                        return pd.NA
+                    if isinstance(dt, str):
+                        dt = pd.to_datetime(dt)
+                    return int(dt.timestamp() * 1000)
+
+                pd_candles.loc[mask_invalid_candles, "timestamp_ms"] = pd_candles.loc[mask_invalid_candles, "datetime"].apply(_to_timestamp_ms)
+
+                pd_candles["close"] = pd_candles["close"].ffill()
+                pd_candles.loc[mask_invalid_candles, ["open", "high", "low"]] = pd_candles.loc[
+                    mask_invalid_candles, ["close"]
+                ]
+                pd_candles.loc[mask_invalid_candles, "volume"] = 0.0
+
+    return exchange_candles # type: ignore
 
 '''
 Find listing date https://gist.github.com/mr-easy/5185b1dcdd5f9f908ff196446f092e9b
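
A hedged, self-contained illustration (not from the package) of the repair step added above: rows with a null timestamp_ms get a timestamp rebuilt from 'datetime', close is filled forward, open/high/low take the filled close, and volume is zeroed.

    # Sketch only: a two-row toy frame; the second row simulates an invalid candle.
    import pandas as pd

    pd_candles = pd.DataFrame({
        "datetime": pd.to_datetime(["2024-01-01 00:00", "2024-01-01 01:00"]),
        "timestamp_ms": [1704067200000, None],
        "open": [100.0, None], "high": [101.0, None],
        "low": [99.0, None], "close": [100.5, None],
        "volume": [10.0, None],
    })
    mask = pd_candles["timestamp_ms"].isna()
    pd_candles.loc[mask, "timestamp_ms"] = pd_candles.loc[mask, "datetime"].apply(
        lambda dt: int(dt.timestamp() * 1000)   # naive timestamps treated as UTC here
    )
    pd_candles["close"] = pd_candles["close"].ffill()
    for col in ["open", "high", "low"]:
        pd_candles.loc[mask, col] = pd_candles.loc[mask, "close"]
    pd_candles.loc[mask, "volume"] = 0.0
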
@@ -541,10 +657,9 @@ def _fetch_candles_ccxt(
     exchange,
     normalized_symbols : List[str],
     candle_size : str,
-    num_candles_limit : int = 100
+    num_candles_limit : int = 100,
+    logger = None
 ) -> Dict[str, Union[pd.DataFrame, None]]:
-    logger = logging.getLogger()
-
     rsp = {}
 
     exchange.load_markets()
@@ -552,7 +667,7 @@
     num_tickers = len(normalized_symbols)
     i = 0
     for ticker in normalized_symbols:
-        @retry(num_attempts=3, pause_between_retries_ms=1000)
+        @retry(num_attempts=3, pause_between_retries_ms=1000, logger=logger)
         def _fetch_ohlcv(exchange, symbol, timeframe, since, limit, params) -> Union[List, NoReturn]:
             one_timeframe = f"1{timeframe[-1]}"
             candles = exchange.fetch_ohlcv(symbol=symbol, timeframe=one_timeframe, since=since, limit=limit, params=params)
@@ -563,7 +678,7 @@
 
         def _calc_increment(candle_size):
             increment = 1
-            num_intervals = int(candle_size[0])
+            num_intervals = int(candle_size.replace(candle_size[-1],''))
             interval_type = candle_size[-1]
             if interval_type == "m":
                 increment = 60
@@ -575,7 +690,8 @@
                 raise ValueError(f"Invalid candle_size {candle_size}")
             return num_intervals * increment
 
-        logger.info(f"{i}/{num_tickers} Fetching {candle_size} candles for {ticker}.")
+        if logger:
+            logger.info(f"{i}/{num_tickers} Fetching {candle_size} candles for {ticker}.")
 
         '''
         It uses a while loop to implement a sliding window to download candles between start_ts and end_ts.
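
A short worked example (not from the package) of why the parsing change in _calc_increment matters for multi-digit candle sizes:

    # Sketch only: reproduces the old vs. new num_intervals parsing for "15m".
    candle_size = "15m"
    old_num_intervals = int(candle_size[0])                            # 1  -- silently drops the "5"
    new_num_intervals = int(candle_size.replace(candle_size[-1], ''))  # 15
    assert old_num_intervals * 60 == 60     # old sliding-window step: 1 minute
    assert new_num_intervals * 60 == 900    # new sliding-window step: 15 minutes
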
@@ -584,12 +700,13 @@
         A more efficient way is to find listing date. Start looping from there.
         '''
         market = exchange.markets[ticker]
+        this_ticker_start_ts = start_ts
         if market['created']:
-            start_ts = max(start_ts, int(market['created']/1000))
+            this_ticker_start_ts = max(this_ticker_start_ts, int(market['created']/1000))
 
         all_candles = []
         params = {}
-        this_cutoff = start_ts
+        this_cutoff = this_ticker_start_ts
         while this_cutoff<end_ts:
             candles = _fetch_ohlcv(exchange=exchange, symbol=ticker, timeframe=candle_size, since=int(this_cutoff * 1000), limit=num_candles_limit, params=params)
             if candles and len(candles)>0:
@@ -623,7 +740,7 @@ def fetch_deribit_btc_option_expiries(
         Dict[str, Dict[str, Union[str, float]]]
     ]
 ]:
-    exchange = deribit()
+    exchange = ccxt.deribit()
     instruments = exchange.public_get_get_instruments({
         'currency': market,
         'kind': 'option',
@@ -679,7 +796,9 @@
 
 def build_pair_candles(
     pd_candles1 : pd.DataFrame,
-    pd_candles2 : pd.DataFrame
+    pd_candles2 : pd.DataFrame,
+    left_columns_postfix : str = "_1",
+    right_columns_postfix : str = "_2"
 ) -> pd.DataFrame:
     min_timestamp_ms1 = int(pd_candles1.iloc[0]['timestamp_ms'])
     max_timestamp_ms1 = int(pd_candles1.iloc[-1]['timestamp_ms'])
@@ -709,16 +828,16 @@
     assert(max_timestamp_ms1==max_timestamp_ms2)
     assert(pd_candles1.shape[0]==pd_candles2.shape[0])
 
-    if len([ col for col in pd_candles1.columns if col[-2:]=='_1' ]) == 0:
-        pd_candles1.columns = [str(col) + '_1' for col in pd_candles1.columns]
+    if len([ col for col in pd_candles1.columns if col[-2:]==left_columns_postfix ]) == 0:
+        pd_candles1.columns = [str(col) + left_columns_postfix for col in pd_candles1.columns]
 
-    if len([ col for col in pd_candles2.columns if col[-2:]=='_2' ]) == 0:
-        pd_candles2.columns = [str(col) + '_2' for col in pd_candles2.columns]
+    if len([ col for col in pd_candles2.columns if col[-2:]==right_columns_postfix ]) == 0:
+        pd_candles2.columns = [str(col) + right_columns_postfix for col in pd_candles2.columns]
 
     pd_candles1.reset_index(drop=True, inplace=True)
     pd_candles2.reset_index(drop=True, inplace=True)
     pd_candles = pd.concat([pd_candles1, pd_candles2], axis=1)
-    pd_candles['timestamp_ms_gap'] = pd_candles['timestamp_ms_1'] - pd_candles['timestamp_ms_2']
+    pd_candles['timestamp_ms_gap'] = pd_candles[f'timestamp_ms{left_columns_postfix}'] - pd_candles[f'timestamp_ms{right_columns_postfix}']
     assert(pd_candles[pd_candles.timestamp_ms_gap!=0].shape[0]==0)
 
     pd_candles.drop(pd_candles.columns[pd_candles.columns.str.contains('unnamed',case = False)],axis = 1, inplace = True)
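
A hedged sketch (not from the package) of the new postfix parameters, which replace the hard-coded '_1'/'_2' suffixes when the two legs are merged; the input frames here are placeholders and must already cover the same timestamp range, as the asserts in build_pair_candles require.

    # Sketch only: pd_btc_candles / pd_eth_candles are assumed, timestamp-aligned candle frames.
    from siglab_py.util.market_data_util import build_pair_candles

    pd_pair = build_pair_candles(
        pd_candles1=pd_btc_candles,
        pd_candles2=pd_eth_candles,
        left_columns_postfix='_btc',    # columns become open_btc, close_btc, ...
        right_columns_postfix='_eth'
    )
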
@@ -2,7 +2,8 @@ import time
 
 def retry(
     num_attempts : int = 1,
-    pause_between_retries_ms : int = 1000
+    pause_between_retries_ms : int = 1000,
+    logger = None
 ):
     def decorator(method):
         def wrapper(*args, **kw):
@@ -15,6 +16,10 @@ def retry(
                 except Exception as retry_error:
                     if i==(num_attempts-1):
                         err_msg = f"retry_util.retry gave up {method.__name__} after {num_attempts} calls. {args} {kw}. {retry_error}"
+                        if logger:
+                            logger.error(err_msg)
+                        else:
+                            print(err_msg)
                         raise Exception(err_msg) from retry_error
                 finally:
                     time.sleep(int(pause_between_retries_ms/1000))
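
A hedged usage sketch (not from the package) of the updated retry decorator: when a logger is supplied, the final failure is reported through logger.error before the wrapped exception is re-raised; otherwise it falls back to print.

    # Sketch only: flaky_fetch is a hypothetical function that always fails.
    import logging
    from siglab_py.util.retry_util import retry

    logger = logging.getLogger(__name__)

    @retry(num_attempts=3, pause_between_retries_ms=1000, logger=logger)
    def flaky_fetch():
        raise RuntimeError("transient error")
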
@@ -32,6 +32,18 @@ def generate_rand_nums(
 
     return result
 
+def compute_level_increment(
+    num : float,
+    level_granularity : float = 0.01
+) -> float:
+    if math.isnan(num):
+        return num
+    level_size = num * level_granularity
+    magnitude = math.floor(math.log10(abs(level_size)))
+    base_increment = 10 ** magnitude
+    rounded_level_size = round(level_size / base_increment) * base_increment
+    return rounded_level_size
+
 # https://norman-lm-fung.medium.com/levels-are-psychological-7176cdefb5f2
 def round_to_level(
     num : float,
@@ -39,13 +51,23 @@ def round_to_level(
 ) -> float:
     if math.isnan(num):
         return num
-    level_size = num * level_granularity
-    magnitude = math.floor(math.log10(abs(level_size)))
-    base_increment = 10 ** magnitude
-    rounded_level_size = round(level_size / base_increment) * base_increment
+    rounded_level_size = compute_level_increment(num, level_granularity)
     rounded_num = round(num / rounded_level_size) * rounded_level_size
     return rounded_num
 
+def compute_adjacent_levels(
+    num : float,
+    level_granularity : float = 0.01,
+    num_levels_per_side : int = 1
+) -> Union[None, List[float]]:
+    if math.isnan(num):
+        return None
+    rounded_level_size = compute_level_increment(num, level_granularity)
+    rounded_num = round(num / rounded_level_size) * rounded_level_size
+    levels = [ rounded_num ]
+    levels = list(reversed([ rounded_num - (i+1)*rounded_level_size for i in list(range(num_levels_per_side))])) + levels + [ rounded_num + (i+1)*rounded_level_size for i in list(range(num_levels_per_side))]
+    return levels
+
 def bucket_series(
     values : List[float],
     outlier_threshold_percent : float = 0,
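
A hedged worked example (not from the package) of the refactored helpers: with the default 1% granularity, a price of 100.0 yields a level increment of 1.0, so one level per side gives [99.0, 100.0, 101.0].

    # Sketch only: assumes both helpers are module-level in siglab_py.util.simple_math.
    from siglab_py.util.simple_math import compute_adjacent_levels, compute_level_increment

    assert compute_level_increment(100.0, level_granularity=0.01) == 1.0
    print(compute_adjacent_levels(num=100.0, level_granularity=0.01, num_levels_per_side=1))
    # expected: [99.0, 100.0, 101.0]
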
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: siglab_py
-Version: 0.5.66
+Version: 0.6.18
 Summary: Market data fetches, TA calculations and generic order gateway.
 Author: r0bbarh00d
 Author-email: r0bbarh00d <r0bbarh00d@gmail.com>
@@ -0,0 +1,50 @@
+siglab_py/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+siglab_py/constants.py,sha256=RlWh0-_sXHanfPIVLgDPLNbn9ljv3aZJYMv92Zd0jZI,701
+siglab_py/backtests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+siglab_py/backtests/backtest_core.py,sha256=LsK2H9bdPgR0Nyjw52Ama7S_jVbfeybKnyDefbBAC7g,162194
+siglab_py/backtests/coinflip_15m_crypto.py,sha256=MKMksVLVER3pMykSaXlSVyR63IvxJuTZmUEp17SDTpk,15247
+siglab_py/backtests/fibonacci_d_mv_crypto.py,sha256=WTAgumotdGd5VmWRWh7P1oXSgZMaV1ew42q2iXAno0s,20513
+siglab_py/backtests/macdrsi_crosses_15m_tc_crypto.py,sha256=2qVBX2hqfW332rpp1_Nd9U5uQDoAXZ9qlPKBYTEL06E,16457
+siglab_py/exchanges/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+siglab_py/exchanges/any_exchange.py,sha256=Y-zue75ZSmu9Ga1fONbjBGLNH5pDHQI01hCSjuLBkAk,889
+siglab_py/exchanges/binance.py,sha256=3hLeU5t4AXVLS2u6V8wlqsvdo3Uh00ebgx4MONy8X6k,1394
+siglab_py/exchanges/deribit.py,sha256=gI5ezORDeu-Z1S8yxJj1Kj43OroK3ga9BckNijkuk8A,3361
+siglab_py/exchanges/futubull.py,sha256=i0jNgl9BGJvgEIDeOkTfD52FaK77vX8axCKNYwJHwSI,21144
+siglab_py/market_data_providers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+siglab_py/market_data_providers/aggregated_orderbook_provider.py,sha256=FZRobEBNRzcNGlOG3u38OVhmOZYlkNm8dVvR-S7Ii2g,23342
+siglab_py/market_data_providers/candles_provider.py,sha256=FXvciwU7ONC0VdX-fggELIl2yqd7mRB2CjDeUsDzr-A,14145
+siglab_py/market_data_providers/candles_ta_provider.py,sha256=az3tVjR4g0vhOCc8S5kGvjuNIpwxnXarUhMi62Z5Pzc,12013
+siglab_py/market_data_providers/ccxt_candles_ta_to_csv.py,sha256=DHj51QTbkCmEd9RFNVhWWpsSPz1aLd6zTLqkUUbEkK0,11158
+siglab_py/market_data_providers/deribit_options_expiry_provider.py,sha256=e9Ee8TmC8pXaid8-jouSLKIpuW6_JBBgwRTieI665yQ,8684
+siglab_py/market_data_providers/futu_candles_ta_to_csv.py,sha256=SCWlI_mOuErpGP8Kxh5WKEoff9cqqxO19oLFLd04bTs,10964
+siglab_py/market_data_providers/google_monitor.py,sha256=B08Aj1urL4M9hVUfjubVwTsFhfsj5-eFaf36lYqZ8-o,14028
+siglab_py/market_data_providers/orderbooks_provider.py,sha256=b1XgPVSTU3y0Dxm5DOKjeldK-0ZrP15_iG3fzjBBBG8,16376
+siglab_py/market_data_providers/tg_monitor.py,sha256=LY4oRm5qQ_XiuLk0RMVDc0Vdsi6CKE6O9hgeGm3VXBM,21995
+siglab_py/market_data_providers/trigger_provider.py,sha256=b9B1RSBI8bFyGsM4DVXhEY8w08iTy8sPZkTqoy72VJE,2505
+siglab_py/ordergateway/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+siglab_py/ordergateway/client.py,sha256=LvtrYirrdFOcKgTkvuqwdEN7r3nurjX320ESnk7tHE0,15095
+siglab_py/ordergateway/encrypt_keys_util.py,sha256=U_M-jPrPYOTO_sU0bMVkO5ruNXge5vek8yUGa8jaE-g,1349
+siglab_py/ordergateway/gateway.py,sha256=KAulWLZf8UYFo0esWwb4H8NmGnylkkckx4xBcF6-IZQ,47343
+siglab_py/ordergateway/test_ordergateway.py,sha256=4PE2flp_soGcD3DrI7zJOzZndjkb6I5XaDrFNNq4Huo,4009
+siglab_py/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+siglab_py/tests/integration/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+siglab_py/tests/integration/market_data_util_tests.py,sha256=h6KQytMFj3et54sY-r58fYWazfCxCrscjJqkHfgEs5g,8762
+siglab_py/tests/unit/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+siglab_py/tests/unit/analytic_util_tests.py,sha256=tV7MsItlidwqdAQ_kD1000PsG4zIoQyQY1RdOn5XQAQ,5942
+siglab_py/tests/unit/market_data_util_tests.py,sha256=A1y83itISmMJdn6wLpfwcr4tGola8wTf1D1xbelMvgw,2026
+siglab_py/tests/unit/simple_math_tests.py,sha256=ZIRNjyLI1UtRjJ_vfz13WGCOxbU0ttR101rrjaG5WGM,10480
+siglab_py/tests/unit/trading_util_tests.py,sha256=LiflZrduWXyLMbpSFQCaydA7jdJx3vFR-3KuKRRGhjQ,2927
+siglab_py/util/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+siglab_py/util/analytic_util.py,sha256=KoRDlesLuYKkr0V0ytJLEqaDHh6Tyfr6GhaPQYFCU2U,65225
+siglab_py/util/aws_util.py,sha256=KGmjHrr1rpnnxr33nXHNzTul4tvyyxl9p6gpwNv0Ygc,2557
+siglab_py/util/datetime_util.py,sha256=3yijKKhnvC1vJME-NsZxFn6hu2ogLsTlXdt2DoIyQVw,1590
+siglab_py/util/market_data_util.py,sha256=ReutR6IjL7kyNKwftB-RMu5P9DUfVD0UHgzS-qwaHEk,36341
+siglab_py/util/notification_util.py,sha256=tNZMUkkjz4q1CKqcQq62oEmZgHgNIwz2Iw9J22V22Zw,2668
+siglab_py/util/retry_util.py,sha256=Y244NHoN7eJoQWn7MStRz1BtLz6B61MZHAGhJ8gg2b8,1108
+siglab_py/util/simple_math.py,sha256=PmSEwDvbnUIKS-ACY6wpO2vyMFFq5ZMIWmrcI15VoM8,9407
+siglab_py/util/slack_notification_util.py,sha256=G27n-adbT3Q6oaHSMvu_Nom794rrda5PprSF-zvmzkM,1912
+siglab_py/util/trading_util.py,sha256=dlIOzoMGnddLSFODcJ61EBH1Aeruq4IT2MsxIdFkV9I,5252
+siglab_py-0.6.18.dist-info/METADATA,sha256=sxecpcSMw1HACVmeVgjuo0SUWhpAcq_hvvLRb5JmUXg,829
+siglab_py-0.6.18.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+siglab_py-0.6.18.dist-info/top_level.txt,sha256=AbD4VR9OqmMOGlMJLkAVPGQMtUPIQv0t1BF5xmcLJSk,10
+siglab_py-0.6.18.dist-info/RECORD,,
@@ -1,5 +1,5 @@
 Wheel-Version: 1.0
-Generator: setuptools (78.1.1)
+Generator: setuptools (80.9.0)
 Root-Is-Purelib: true
 Tag: py3-none-any
 
@@ -1,41 +0,0 @@
1
- siglab_py/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
2
- siglab_py/constants.py,sha256=atSjvM0wv_f1wrzWHE0DcbUn6fqLmg-51BpfMJR1QB4,547
3
- siglab_py/exchanges/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
4
- siglab_py/exchanges/any_exchange.py,sha256=Y-zue75ZSmu9Ga1fONbjBGLNH5pDHQI01hCSjuLBkAk,889
5
- siglab_py/exchanges/futubull.py,sha256=i0jNgl9BGJvgEIDeOkTfD52FaK77vX8axCKNYwJHwSI,21144
6
- siglab_py/market_data_providers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
7
- siglab_py/market_data_providers/aggregated_orderbook_provider.py,sha256=FZRobEBNRzcNGlOG3u38OVhmOZYlkNm8dVvR-S7Ii2g,23342
8
- siglab_py/market_data_providers/candles_provider.py,sha256=fqHJjlECsBiBlpgyywrc4gTgxiROPNzZM8KxQBB5cOg,14139
9
- siglab_py/market_data_providers/candles_ta_provider.py,sha256=uiAhbEZZdTF-YulBHpSLwabos5LHCKU91NTiTmpUc0w,12001
10
- siglab_py/market_data_providers/ccxt_candles_ta_to_csv.py,sha256=DHj51QTbkCmEd9RFNVhWWpsSPz1aLd6zTLqkUUbEkK0,11158
11
- siglab_py/market_data_providers/deribit_options_expiry_provider.py,sha256=e9Ee8TmC8pXaid8-jouSLKIpuW6_JBBgwRTieI665yQ,8684
12
- siglab_py/market_data_providers/futu_candles_ta_to_csv.py,sha256=SCWlI_mOuErpGP8Kxh5WKEoff9cqqxO19oLFLd04bTs,10964
13
- siglab_py/market_data_providers/orderbooks_provider.py,sha256=olt-3LIkoyzQWfNNQRhJtKibLbkTutt_q_rCCTM7i1g,16216
14
- siglab_py/market_data_providers/test_provider.py,sha256=wBLCgcWjs7FGZJXWsNyn30lkOLa_cgpuvqRakMC0wbA,2221
15
- siglab_py/market_data_providers/tg_monitor.py,sha256=lRqONP0JAP223gyX37R-wCkhyGFKTvKkgmeDNoO3ay4,21813
16
- siglab_py/ordergateway/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
17
- siglab_py/ordergateway/client.py,sha256=LvtrYirrdFOcKgTkvuqwdEN7r3nurjX320ESnk7tHE0,15095
18
- siglab_py/ordergateway/encrypt_keys_util.py,sha256=U_M-jPrPYOTO_sU0bMVkO5ruNXge5vek8yUGa8jaE-g,1349
19
- siglab_py/ordergateway/gateway.py,sha256=Z-BQ-Z9gXoNrKQHzRIy9R1mnCybf9QwWhHpqkSI7bBM,43901
20
- siglab_py/ordergateway/test_ordergateway.py,sha256=4PE2flp_soGcD3DrI7zJOzZndjkb6I5XaDrFNNq4Huo,4009
21
- siglab_py/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
22
- siglab_py/tests/integration/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
23
- siglab_py/tests/integration/market_data_util_tests.py,sha256=p-RWIJZLyj0lAdfi4QTIeAttCm_e8mEVWFKh4OWuogU,7189
24
- siglab_py/tests/unit/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
25
- siglab_py/tests/unit/analytic_util_tests.py,sha256=90tC5wjaYtoma3DGwkQF7akAKkZ0R1LGDL8Jwna2TFg,5834
26
- siglab_py/tests/unit/market_data_util_tests.py,sha256=A1y83itISmMJdn6wLpfwcr4tGola8wTf1D1xbelMvgw,2026
27
- siglab_py/tests/unit/simple_math_tests.py,sha256=rWqq93W4Vlqmu0UeZCmSOfLirr0gPh2ASVIZ8O77qXY,9653
28
- siglab_py/tests/unit/trading_util_tests.py,sha256=LiflZrduWXyLMbpSFQCaydA7jdJx3vFR-3KuKRRGhjQ,2927
29
- siglab_py/util/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
30
- siglab_py/util/analytic_util.py,sha256=0PcWMP8ZNYzDBMvFViwGetAbUZIwCQq3iX2iJYKOxyI,61859
31
- siglab_py/util/aws_util.py,sha256=KGmjHrr1rpnnxr33nXHNzTul4tvyyxl9p6gpwNv0Ygc,2557
32
- siglab_py/util/market_data_util.py,sha256=mUXg4uaiX3b6_klgJWIEgnUQU4IUd6CwTOqKLiQWRlU,31307
33
- siglab_py/util/notification_util.py,sha256=tNZMUkkjz4q1CKqcQq62oEmZgHgNIwz2Iw9J22V22Zw,2668
34
- siglab_py/util/retry_util.py,sha256=g-UU6pkPouWZZRZEqP99R2Z0lX5xzckYkzjwqqSDpVQ,922
35
- siglab_py/util/simple_math.py,sha256=F7vGj0O2Y9EAGcMFR6SN1tTjBWO_a7YZeiTzk3eHaVI,8518
36
- siglab_py/util/slack_notification_util.py,sha256=G27n-adbT3Q6oaHSMvu_Nom794rrda5PprSF-zvmzkM,1912
37
- siglab_py/util/trading_util.py,sha256=dlIOzoMGnddLSFODcJ61EBH1Aeruq4IT2MsxIdFkV9I,5252
38
- siglab_py-0.5.66.dist-info/METADATA,sha256=21-GAJelrVLQfGm_ZugZPVHnFpsFyz3xEtdgbaUyTyY,829
39
- siglab_py-0.5.66.dist-info/WHEEL,sha256=lTU6B6eIfYoiQJTZNc-fyaR6BpL6ehTzU3xGYxn2n8k,91
40
- siglab_py-0.5.66.dist-info/top_level.txt,sha256=AbD4VR9OqmMOGlMJLkAVPGQMtUPIQv0t1BF5xmcLJSk,10
41
- siglab_py-0.5.66.dist-info/RECORD,,