siglab_py-0.1.0-py3-none-any.whl
- siglab_py/__init__.py +0 -0
- siglab_py/constants.py +3 -0
- siglab_py/exchanges/__init__.py +0 -0
- siglab_py/exchanges/any_exchange.py +20 -0
- siglab_py/market_data_providers/__init__.py +0 -0
- siglab_py/market_data_providers/aggregated_orderbook_provider.py +451 -0
- siglab_py/market_data_providers/candles_provider.py +342 -0
- siglab_py/market_data_providers/candles_ta_provider.py +263 -0
- siglab_py/market_data_providers/deribit_options_expiry_provider.py +197 -0
- siglab_py/market_data_providers/orderbooks_provider.py +359 -0
- siglab_py/market_data_providers/test_provider.py +70 -0
- siglab_py/ordergateway/__init__.py +0 -0
- siglab_py/ordergateway/client.py +137 -0
- siglab_py/ordergateway/encrypt_keys_util.py +43 -0
- siglab_py/ordergateway/gateway.py +658 -0
- siglab_py/ordergateway/test_ordergateway.py +140 -0
- siglab_py/tests/__init__.py +0 -0
- siglab_py/tests/integration/__init__.py +0 -0
- siglab_py/tests/integration/market_data_util_tests.py +123 -0
- siglab_py/tests/unit/__init__.py +0 -0
- siglab_py/util/__init__.py +0 -0
- siglab_py/util/analytic_util.py +792 -0
- siglab_py/util/aws_util.py +47 -0
- siglab_py/util/market_data_util.py +385 -0
- siglab_py/util/retry_util.py +15 -0
- siglab_py-0.1.0.dist-info/METADATA +36 -0
- siglab_py-0.1.0.dist-info/RECORD +29 -0
- siglab_py-0.1.0.dist-info/WHEEL +5 -0
- siglab_py-0.1.0.dist-info/top_level.txt +1 -0

siglab_py/util/aws_util.py
@@ -0,0 +1,47 @@
from typing import Union, Dict

'''
https://gist.github.com/raphaelgabbarelli/bc5a41d93789046f9c71e4685f1463e7
https://www.youtube.com/watch?v=CFEYKrP0vxs
'''
import boto3
import base64

class AwsKmsUtil(object):
    def __init__(
        self,
        key_id : str,
        profile_name : Union[str, None] = None
    ):
        self.key_id = key_id
        aws_session = boto3.session.Session(profile_name=profile_name) # type: ignore ("session" is not a known attribute of module "boto3")
        self.aws_kms_client = aws_session.client('kms')

    def encrypt(self, plaintext : str) -> bytes:
        encrypted = self.aws_kms_client.encrypt(KeyId=self.key_id, Plaintext=plaintext)
        encrypted : bytes = base64.b64encode(encrypted['CiphertextBlob']) # type: ignore
        return encrypted

    def decrypt(self, encrypted : bytes) -> str:
        decrypted = self.aws_kms_client.decrypt(CiphertextBlob=base64.b64decode(encrypted)) # type: ignore
        return decrypted['Plaintext'].decode('utf-8') # type: ignore

if __name__ == "__main__":
    '''
    From the command line, run 'aws configure' with your IAM user's Access Key ID and Secret Access Key (this assumes you have awscli installed):
        aws configure
        AWS Access Key ID [****************ABCD]: <-- ***ABCD identifies your IAM user
        AWS Secret Access Key [****************xxx]: <-- xxx is the secret key of your IAM user
        Default region name [us-east-1]: <-- The region must be where your KMS key resides!
        Default output format [None]:

    Remember that when you create your KMS key, you need to grant the IAM user permission to use the newly created key (this is done on the KMS side, not in IAM).
    '''
    key_id : str = "" # Enter your KMS key ID here. You can find it under AWS > KMS > Customer managed keys.
    original : str = "some secret"

    aws_kms = AwsKmsUtil(key_id=key_id, profile_name=None)
    encrypted = aws_kms.encrypt(original)
    decrypted = aws_kms.decrypt(encrypted)

    print(f"original: {original}, encrypted: {encrypted.decode('utf-8')}, decrypted: {decrypted}")

siglab_py/util/market_data_util.py
@@ -0,0 +1,385 @@
import tzlocal
from datetime import datetime, timezone
from typing import List, Dict, Union, NoReturn, Any, Tuple
from pathlib import Path
import math
import pandas as pd
import numpy as np

from ccxt.base.exchange import Exchange as CcxtExchange
from ccxt import deribit

# https://www.analyticsvidhya.com/blog/2021/06/download-financial-dataset-using-yahoo-finance-in-python-a-complete-guide/
from yahoofinancials import YahooFinancials

# yfinance allows intervals '1m', '5m', '15m', '1h', '1d', '1wk', '1mo'. yahoofinancials is not as flexible.
import yfinance as yf

def timestamp_to_datetime_cols(pd_candles : pd.DataFrame):
    pd_candles['datetime'] = pd_candles['timestamp_ms'].apply(
        lambda x: datetime.fromtimestamp(int(x.timestamp()) if isinstance(x, pd.Timestamp) else int(x / 1000))
    )
    pd_candles['datetime'] = pd.to_datetime(pd_candles['datetime'])
    pd_candles['datetime'] = pd_candles['datetime'].dt.tz_localize(None)
    pd_candles['datetime_utc'] = pd_candles['timestamp_ms'].apply(
        lambda x: datetime.fromtimestamp(int(x.timestamp()) if isinstance(x, pd.Timestamp) else int(x / 1000), tz=timezone.utc)
    )

    # These columns make it easy to do grouping with an Excel pivot table.
    pd_candles['year'] = pd_candles['datetime'].dt.year
    pd_candles['month'] = pd_candles['datetime'].dt.month
    pd_candles['day'] = pd_candles['datetime'].dt.day
    pd_candles['hour'] = pd_candles['datetime'].dt.hour
    pd_candles['minute'] = pd_candles['datetime'].dt.minute
    pd_candles['dayofweek'] = pd_candles['datetime'].dt.dayofweek # dayofweek: Monday is 0 and Sunday is 6

def fix_column_types(pd_candles : pd.DataFrame):
    pd_candles['open'] = pd_candles['open'].astype(float)
    pd_candles['high'] = pd_candles['high'].astype(float)
    pd_candles['low'] = pd_candles['low'].astype(float)
    pd_candles['close'] = pd_candles['close'].astype(float)
    pd_candles['volume'] = pd_candles['volume'].astype(float)

    timestamp_to_datetime_cols(pd_candles)

    '''
    The 'Unnamed: 0', 'Unnamed: 1', ... columns often appear in a DataFrame when it is saved to a file (e.g. CSV or Excel) and later loaded.
    This usually happens if the DataFrame's index was saved along with the data; pandas then treats it as a column when loading the file.
    We want to drop them, as they'd mess up idxmin/idxmax calls, which would take values from 'Unnamed' instead of the actual pandas index.
    '''
    pd_candles.drop(pd_candles.columns[pd_candles.columns.str.contains('unnamed', case=False)], axis=1, inplace=True)
    pd_candles.reset_index(drop=True, inplace=True)
    pd_candles.sort_values("datetime", inplace=True)
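
A quick sketch of what these two helpers produce, on a hypothetical two-row frame with string OHLCV columns and millisecond timestamps:

import pandas as pd

pd_candles = pd.DataFrame({
    'timestamp_ms': [1704067200000, 1704070800000],  # 2024-01-01 00:00 and 01:00 UTC
    'open': ['42000', '42100'], 'high': ['42200', '42300'],
    'low': ['41900', '42000'], 'close': ['42100', '42250'], 'volume': ['10', '12'],
})
fix_column_types(pd_candles)  # casts to float, adds datetime/datetime_utc/year/.../dayofweek
print(pd_candles[['datetime', 'datetime_utc', 'year', 'hour', 'dayofweek']])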

'''
https://polygon.io/docs/stocks
'''
class PolygonMarketDataProvider:
    pass

class NASDAQExchange:
    def __init__(self, data_dir : Union[str, None]) -> None:
        if data_dir:
            self.data_dir = data_dir
        else:
            self.data_dir = Path(__file__).resolve().parents[2] / "data/nasdaq"

    def fetch_ohlcv(
        self,
        symbol : str,
        since : int,
        timeframe : str,
        limit : int = 1
    ) -> List:
        pd_candles = self.fetch_candles(
            symbols=[symbol],
            start_ts=int(since/1000),
            end_ts=None,
            candle_size=timeframe
        )[symbol]
        if pd_candles is not None:
            return pd_candles.values.tolist()
        else:
            return []

    def fetch_candles(
        self,
        start_ts,
        end_ts,
        symbols,
        candle_size
    ) -> Dict[str, Union[pd.DataFrame, None]]:
        exchange_candles : Dict[str, Union[pd.DataFrame, None]] = {}

        start_date = datetime.fromtimestamp(start_ts)
        end_date = datetime.fromtimestamp(end_ts) if end_ts else None
        start_date_str = start_date.strftime('%Y-%m-%d')
        end_date_str = end_date.strftime('%Y-%m-%d') if end_date else None
        local_tz = datetime.now().astimezone().tzinfo

        for symbol in symbols:
            # CSV from NASDAQ: https://www.nasdaq.com/market-activity/quotes/historical
            pd_daily_candles = pd.read_csv(f"{self.data_dir}/NASDAQ_hist_{symbol.replace('^','')}.csv")
            pd_daily_candles.rename(columns={'Date' : 'datetime', 'Open': 'open', 'High': 'high', 'Low': 'low', 'Close/Last' : 'close', 'Adj Close' : 'adj_close', 'Volume' : 'volume' }, inplace=True)
            pd_daily_candles['open'] = pd_daily_candles['open'].astype(str).str.replace('$','')
            pd_daily_candles['high'] = pd_daily_candles['high'].astype(str).str.replace('$','')
            pd_daily_candles['low'] = pd_daily_candles['low'].astype(str).str.replace('$','')
            pd_daily_candles['close'] = pd_daily_candles['close'].astype(str).str.replace('$','')
            pd_daily_candles['datetime'] = pd.to_datetime(pd_daily_candles['datetime'])
            pd_daily_candles['timestamp_ms'] = pd_daily_candles.datetime.values.astype(np.int64) // 10 ** 6
            pd_daily_candles['symbol'] = symbol
            pd_daily_candles['exchange'] = 'nasdaq'
            fix_column_types(pd_daily_candles)

            if candle_size=="1h":
                # Fill forward (i.e. you don't actually have hourly candles).
                start = pd_daily_candles["datetime"].min().normalize()
                end = pd_daily_candles["datetime"].max().normalize() + pd.Timedelta(days=1)
                hourly_index = pd.date_range(start=start, end=end, freq="h") # lowercase "h": 'H' is deprecated in recent pandas
                pd_hourly_candles = pd.DataFrame({"datetime": hourly_index})
                pd_hourly_candles = pd.merge_asof(
                    pd_hourly_candles.sort_values("datetime"),
                    pd_daily_candles.sort_values("datetime"),
                    on="datetime",
                    direction="backward"
                )

                # When you fill forward, a few candles before the start date can have null values (open, high, low, close, volume ...)
                first_candle_dt = pd_hourly_candles[(~pd_hourly_candles.close.isna()) & (pd_hourly_candles['datetime'].dt.time == pd.Timestamp('00:00:00').time())].iloc[0]['datetime']
                pd_hourly_candles = pd_hourly_candles[pd_hourly_candles.datetime>=first_candle_dt]
                exchange_candles[symbol] = pd_hourly_candles

            elif candle_size=="1d":
                exchange_candles[symbol] = pd_daily_candles

        return exchange_candles
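
A usage sketch, assuming a hypothetical /data/nasdaq directory containing NASDAQ_hist_AAPL.csv downloaded from the URL in the comment above (the loader reads the whole CSV; start_ts/end_ts are not used to filter rows):

from datetime import datetime, timezone

nasdaq = NASDAQExchange(data_dir="/data/nasdaq")
candles = nasdaq.fetch_candles(
    start_ts=int(datetime(2024, 1, 1, tzinfo=timezone.utc).timestamp()),
    end_ts=None,
    symbols=["AAPL"],
    candle_size="1d",
)
print(candles["AAPL"].tail())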

class YahooExchange:
    def fetch_ohlcv(
        self,
        symbol : str,
        since : int,
        timeframe : str,
        limit : int = 1
    ) -> List:
        pd_candles = self.fetch_candles(
            symbols=[symbol],
            start_ts=int(since/1000),
            end_ts=None,
            candle_size=timeframe
        )[symbol]
        if pd_candles is not None:
            return pd_candles.values.tolist()
        else:
            return []

    def fetch_candles(
        self,
        start_ts,
        end_ts,
        symbols,
        candle_size
    ) -> Dict[str, Union[pd.DataFrame, None]]:
        exchange_candles : Dict[str, Union[pd.DataFrame, None]] = {}

        start_date = datetime.fromtimestamp(start_ts)
        end_date = datetime.fromtimestamp(end_ts) if end_ts else None # guard: fetch_ohlcv passes end_ts=None
        start_date_str = start_date.strftime('%Y-%m-%d')
        end_date_str = end_date.strftime('%Y-%m-%d') if end_date else None
        local_tz = datetime.now().astimezone().tzinfo

        for symbol in symbols:
            # From yf, "Datetime" is in UTC.
            # The requested range must be within the last 730 days, otherwise the API returns an empty DataFrame.
            pd_candles = yf.download(tickers=symbol, start=start_date_str, end=end_date_str, interval=candle_size)
            pd_candles.reset_index(inplace=True)
            pd_candles.rename(columns={ 'Date' : 'datetime', 'Datetime' : 'datetime', 'Open': 'open', 'High': 'high', 'Low': 'low', 'Close' : 'close', 'Adj Close' : 'adj_close', 'Volume' : 'volume' }, inplace=True)
            pd_candles['datetime'] = pd.to_datetime(pd_candles['datetime'])
            if pd_candles['datetime'].dt.tz is None:
                pd_candles['datetime'] = pd.to_datetime(pd_candles['datetime']).dt.tz_localize('UTC')
            pd_candles['datetime'] = pd_candles['datetime'].dt.tz_convert(local_tz)
            pd_candles['datetime'] = pd_candles['datetime'].dt.tz_localize(None)
            pd_candles['timestamp_ms'] = pd_candles.datetime.values.astype(np.int64) // 10**6
            pd_candles = pd_candles.sort_values(by=['timestamp_ms'], ascending=[True])

            fix_column_types(pd_candles)
            pd_candles['symbol'] = symbol
            pd_candles['exchange'] = 'yahoo'
            exchange_candles[symbol] = pd_candles

        return exchange_candles
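
A sketch of pulling a month of hourly SPY candles (public yfinance data, no API keys; intraday intervals must fall within the 730-day window noted above):

from datetime import datetime, timedelta, timezone

yahoo = YahooExchange()
now = datetime.now(timezone.utc)
candles = yahoo.fetch_candles(
    start_ts=int((now - timedelta(days=30)).timestamp()),
    end_ts=int(now.timestamp()),
    symbols=["SPY"],
    candle_size="1h",
)
print(candles["SPY"][['datetime', 'open', 'close', 'volume']].tail())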

def fetch_historical_price(
    exchange,
    normalized_symbol : str,
    timestamp_ms : int,
    ref_timeframe : str = '1m'
):
    one_candle = fetch_ohlcv_one_candle(exchange=exchange, normalized_symbol=normalized_symbol, timestamp_ms=timestamp_ms, ref_timeframe=ref_timeframe)
    reference_price = abs(one_candle['close'] + one_candle['open'])/2 if one_candle else None
    return reference_price

def fetch_ohlcv_one_candle(
    exchange,
    normalized_symbol : str,
    timestamp_ms : int,
    ref_timeframe : str = '1m'
):
    candles = exchange.fetch_ohlcv(symbol=normalized_symbol, since=int(timestamp_ms), timeframe=ref_timeframe, limit=1)
    one_candle = {
        'timestamp_ms' : candles[0][0],
        'open' : candles[0][1],
        'high' : candles[0][2],
        'low' : candles[0][3],
        'close' : candles[0][4],
        'volume' : candles[0][5]
    } if candles and len(candles)>0 else None

    return one_candle
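
For example, the open/close midpoint of the 1m candle at a given millisecond timestamp can be pulled from a live ccxt venue. A sketch using binance's public OHLCV endpoint (no API keys needed):

import ccxt

exchange = ccxt.binance()
px = fetch_historical_price(
    exchange=exchange,
    normalized_symbol="BTC/USDT",
    timestamp_ms=1704067200000,  # 2024-01-01 00:00 UTC
)
print(px)  # (open + close) / 2 of that 1m candle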

def fetch_candles(
    start_ts, # in sec
    end_ts, # in sec
    exchange,
    normalized_symbols,
    candle_size,

    logger = None,

    num_candles_limit : int = 100,

    cache_dir : Union[str, None] = None,

    list_ts_field : Union[str, None] = None,

    validation_max_gaps : int = 10,
    validation_max_end_date_intervals : int = 1
) -> Dict[str, Union[pd.DataFrame, None]]:
    if type(exchange) is YahooExchange:
        return exchange.fetch_candles(
            start_ts=start_ts,
            end_ts=end_ts,
            symbols=normalized_symbols,
            candle_size=candle_size
        )
    elif type(exchange) is NASDAQExchange:
        return exchange.fetch_candles(
            start_ts=start_ts,
            end_ts=end_ts,
            symbols=normalized_symbols,
            candle_size=candle_size
        )
    elif issubclass(exchange.__class__, CcxtExchange):
        return _fetch_candles_ccxt(
            start_ts=start_ts,
            end_ts=end_ts,
            exchange=exchange,
            normalized_symbols=normalized_symbols,
            candle_size=candle_size,
            logger=logger,
            num_candles_limit=num_candles_limit,
            cache_dir=cache_dir,
            list_ts_field=list_ts_field
        )
    return { '' : None }
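
fetch_candles is thus the single entry point: Yahoo, NASDAQ CSVs and any ccxt exchange are interchangeable behind the same call shape. A sketch against a ccxt venue (kraken, chosen arbitrarily; public endpoint, ccxt exchanges fall through to the paginated _fetch_candles_ccxt below):

import ccxt

candles = fetch_candles(
    start_ts=1704067200,          # in seconds
    end_ts=1704153600,
    exchange=ccxt.kraken(),
    normalized_symbols=["BTC/USD"],
    candle_size="1h",
)
print(candles["BTC/USD"].shape)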

def _fetch_candles_ccxt(
    start_ts : int,
    end_ts : int,
    exchange,
    normalized_symbols : List[str],
    candle_size : str,
    num_candles_limit : int = 100,
    logger = None,
    cache_dir : Union[str, None] = None,
    list_ts_field : Union[str, None] = None
) -> Dict[str, Union[pd.DataFrame, None]]:
    ticker = normalized_symbols[0]
    pd_candles = _fetch_candles(
        symbol = ticker,
        exchange = exchange,
        start_ts = start_ts,
        end_ts = end_ts,
        candle_size = candle_size,
        num_candles_limit = num_candles_limit
    )
    return {
        ticker : pd_candles
    }

def _fetch_candles(
    symbol : str,
    exchange : CcxtExchange,
    start_ts : int,
    end_ts : int,
    candle_size : str = '1d',
    num_candles_limit : int = 100
):
    def _fetch_ohlcv(exchange, symbol, timeframe, since, limit, params) -> Union[List, NoReturn]:
        one_timeframe = f"1{timeframe[-1]}" # e.g. '4h' is fetched as '1h' candles
        candles = exchange.fetch_ohlcv(symbol=symbol, timeframe=one_timeframe, since=since, limit=limit, params=params)
        if candles and len(candles)>0:
            candles.sort(key=lambda x : x[0], reverse=False)

        return candles

    all_candles = []
    params = {}
    this_cutoff = start_ts
    while this_cutoff<=end_ts:
        # Paginate: fetch up to num_candles_limit candles, then advance the cutoff past the newest one.
        candles = _fetch_ohlcv(exchange=exchange, symbol=symbol, timeframe=candle_size, since=int(this_cutoff * 1000), limit=num_candles_limit, params=params)
        if candles and len(candles)>0:
            all_candles = all_candles + [[ int(x[0]), float(x[1]), float(x[2]), float(x[3]), float(x[4]), float(x[5]) ] for x in candles if x[1] and x[2] and x[3] and x[4] and x[5] ]

            record_ts = max([int(record[0]) for record in candles])
            record_ts_str : str = str(record_ts)
            if len(record_ts_str)==13:
                record_ts = int(int(record_ts_str)/1000) # Convert from milliseconds to seconds

            this_cutoff = record_ts + 1
        else:
            break # no more candles returned; stop paginating
    columns = ['exchange', 'symbol', 'timestamp_ms', 'open', 'high', 'low', 'close', 'volume']
    pd_all_candles = pd.DataFrame([ [ exchange.name, symbol, x[0], x[1], x[2], x[3], x[4], x[5] ] for x in all_candles], columns=columns)
    fix_column_types(pd_all_candles)
    pd_all_candles['pct_chg_on_close'] = pd_all_candles['close'].pct_change()
    return pd_all_candles

def fetch_deribit_btc_option_expiries(
    market: str = 'BTC'
) -> Dict[
    str, Union[
        Dict[str, float],
        Dict[str, Dict[str, Union[str, float]]]
    ]
]:
    exchange = deribit()
    instruments = exchange.public_get_get_instruments({
        'currency': market,
        'kind': 'option',
        # 'expired': 'true'
    })['result']

    index_price = exchange.public_get_get_index_price({
        'index_name': f"{market.lower()}_usd"
    })['result']['index_price']
    index_price = float(index_price)

    expiry_data : Dict[str, float] = {}
    expiry_data_breakdown_by_strike : Dict[str, Dict] = {}
    for instrument in instruments:
        expiry_timestamp = int(instrument["expiration_timestamp"]) / 1000
        expiry_date = datetime.utcfromtimestamp(expiry_timestamp)

        strike = float(instrument['strike'])

        option_type = instrument['instrument_name'].split('-')[-1] # Last part is 'C' or 'P'
        is_call = option_type == 'C'

        ticker = exchange.public_get_ticker({
            'instrument_name': instrument['instrument_name']
        })['result']

        open_interest = ticker.get("open_interest", 0) # Open interest in BTC
        open_interest = float(open_interest)
        notional_value : float = open_interest * index_price # Convert to USD

        expiry_str : str = expiry_date.strftime("%Y-%m-%d")
        if expiry_str not in expiry_data:
            expiry_data[expiry_str] = 0
        expiry_data[expiry_str] += notional_value

        if f"{expiry_str}-{strike}" not in expiry_data_breakdown_by_strike:
            expiry_data_breakdown_by_strike[f"{expiry_str}-{strike}"] = {
                'expiry' : expiry_str,
                'strike' : strike,
                'option_type': 'call' if is_call else 'put',
                'notional_value' : notional_value
            }
        else:
            expiry_data_breakdown_by_strike[f"{expiry_str}-{strike}"]['notional_value'] += notional_value

    sorted_expiry_data = sorted(expiry_data.items())

    return {
        'index_price' : index_price,
        'by_expiry' : sorted_expiry_data, # type: ignore (actually a sorted list of (expiry, notional) tuples, not a Dict as the annotation claims)
        'by_expiry_and_strike' : expiry_data_breakdown_by_strike
    }
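
A sketch of reading the result. This function hits Deribit's public REST API with one ticker call per instrument, so expect it to be slow on the full option chain:

expiries = fetch_deribit_btc_option_expiries(market='BTC')
print(f"index price: {expiries['index_price']:,.0f} USD")
for expiry_str, notional in expiries['by_expiry']: # sorted (expiry, notional-USD) tuples
    print(f"{expiry_str}: {notional:,.0f} USD open interest")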

siglab_py/util/retry_util.py
@@ -0,0 +1,15 @@
def retry(num_attempts : int = 1):
    def decorator(method):
        def wrapper(*args, **kw):
            for i in range(num_attempts):
                try:
                    result = method(*args, **kw)
                    if i>0:
                        print(f"retry_gizmo.retry succeeded: {method.__name__} on #{i+1} invocation. {args} {kw}")
                    return result
                except Exception as retry_error:
                    # Swallow the error and retry, unless this was the last attempt.
                    if i==(num_attempts-1):
                        err_msg = f"retry_gizmo.retry failed: {method.__name__} after {num_attempts} invocations. {args} {kw}. {retry_error}"
                        raise Exception(err_msg) from retry_error
        return wrapper
    return decorator
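
Usage is the standard decorator pattern; note there is no backoff between attempts, so it suits quick transient failures. A sketch with a hypothetical flaky function:

import random

@retry(num_attempts=3)
def flaky_fetch() -> float:
    # Hypothetical: fails roughly half the time, to exercise the retry path.
    if random.random() < 0.5:
        raise ConnectionError("transient error")
    return 42.0

print(flaky_fetch())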

siglab_py-0.1.0.dist-info/METADATA
@@ -0,0 +1,36 @@
Metadata-Version: 2.1
Name: siglab-py
Version: 0.1.0
Summary: Market data fetches, TA calculations and generic order gateway.
Author: r0bbarh00d
Author-email: r0bbarh00d <r0bbarh00d@gmail.com>
License: MIT
Project-URL: Homepage, https://github.com/r0bbar/siglab/blob/master/siglab_py/README.md
Classifier: Programming Language :: Python :: 3
Classifier: License :: OSI Approved :: MIT License
Classifier: Operating System :: OS Independent
Requires-Python: >=3.9.19
Description-Content-Type: text/markdown
Requires-Dist: python-dotenv
Requires-Dist: dotmap
Requires-Dist: typing-extensions
Requires-Dist: arrow
Requires-Dist: tzlocal
Requires-Dist: nest-asyncio
Requires-Dist: pandas
Requires-Dist: numpy
Requires-Dist: boto3
Requires-Dist: web3
Requires-Dist: ccxt
Requires-Dist: ccxtpro
Requires-Dist: yfinance
Requires-Dist: yahoofinancials
Requires-Dist: scipy
Requires-Dist: statsmodels
Requires-Dist: scikit-learn
Requires-Dist: sklearn.preprocessing
Requires-Dist: hurst
Requires-Dist: redis
Requires-Dist: redis-py-cluster
Requires-Dist: kafka-python

siglab_py-0.1.0.dist-info/RECORD
@@ -0,0 +1,29 @@
siglab_py/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
siglab_py/constants.py,sha256=YGNdEsWtQ99V2oltaynZTjM8VIboSfyIjDXJKSlhv4U,132
siglab_py/exchanges/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
siglab_py/exchanges/any_exchange.py,sha256=blsB_itXEcr8-DxT4DI5n9v9htJGbU0OsW68fx6pIgo,727
siglab_py/market_data_providers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
siglab_py/market_data_providers/aggregated_orderbook_provider.py,sha256=uN6oD6PqbfnkPKVzKF_GfSEuXdtmv0lFTB-yg9gRYcI,23234
siglab_py/market_data_providers/candles_provider.py,sha256=fqHJjlECsBiBlpgyywrc4gTgxiROPNzZM8KxQBB5cOg,14139
siglab_py/market_data_providers/candles_ta_provider.py,sha256=uiAhbEZZdTF-YulBHpSLwabos5LHCKU91NTiTmpUc0w,12001
siglab_py/market_data_providers/deribit_options_expiry_provider.py,sha256=mP-508KftjseOWvttjEOu6fXu7J7DPCYGoyeuY6I_mU,8585
siglab_py/market_data_providers/orderbooks_provider.py,sha256=cBp-HYCups2Uiwzw0SaUwxrg4unJvnm2TDqIK8f4hUg,15674
siglab_py/market_data_providers/test_provider.py,sha256=wBLCgcWjs7FGZJXWsNyn30lkOLa_cgpuvqRakMC0wbA,2221
siglab_py/ordergateway/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
siglab_py/ordergateway/client.py,sha256=fweIM9aqtWqL2ajrIZ7ChRY4aUmMoqHDLMviv9q7kpU,4995
siglab_py/ordergateway/encrypt_keys_util.py,sha256=-qi87db8To8Yf1WS1Q_Cp2Ya7ZqgWlRqSHfNXCM7wE4,1339
siglab_py/ordergateway/gateway.py,sha256=xlTThsNRRHYWilpc3zpAcYC2nxG4crfpDY8Refbnl64,30038
siglab_py/ordergateway/test_ordergateway.py,sha256=-uK3t82CPSAsO9Bt-GbN3PsbzKcr7lfkdusV3XrqV68,4708
siglab_py/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
siglab_py/tests/integration/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
siglab_py/tests/integration/market_data_util_tests.py,sha256=_UyOMTTBW1VJ7rroyESoxmdhBCQzWSV2A14tAjV9J5U,5634
siglab_py/tests/unit/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
siglab_py/util/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
siglab_py/util/analytic_util.py,sha256=QLabbEMqM4rKKH2PE_LqxIyo-BUdCRhkLybLATBImcc,43438
siglab_py/util/aws_util.py,sha256=x_duGDXJ6sO0wVpoRVDTMxECag7feA7zOwZweVLGl_w,2251
siglab_py/util/market_data_util.py,sha256=3qTq71xGvQXj0ORKJV50IN5FP_mCBF_gvdmlPyhdyco,16439
siglab_py/util/retry_util.py,sha256=mxYuRFZRZoaQQjENcwPmxhxixtd1TFvbxIdPx4RwfRc,743
siglab_py-0.1.0.dist-info/METADATA,sha256=_Gc1E24YhCE6nKmfmi0diNm8-ZNqr8QTeItlDgxe5DM,1096
siglab_py-0.1.0.dist-info/WHEEL,sha256=yQN5g4mg4AybRjkgi-9yy4iQEFibGQmlz78Pik5Or-A,92
siglab_py-0.1.0.dist-info/top_level.txt,sha256=AbD4VR9OqmMOGlMJLkAVPGQMtUPIQv0t1BF5xmcLJSk,10
siglab_py-0.1.0.dist-info/RECORD,,

siglab_py-0.1.0.dist-info/top_level.txt
@@ -0,0 +1 @@
siglab_py