siglab-py 0.1.25__tar.gz → 0.1.26__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of siglab-py might be problematic.
- {siglab_py-0.1.25 → siglab_py-0.1.26}/PKG-INFO +1 -1
- {siglab_py-0.1.25 → siglab_py-0.1.26}/pyproject.toml +1 -1
- {siglab_py-0.1.25 → siglab_py-0.1.26}/setup.cfg +1 -1
- siglab_py-0.1.26/siglab_py/market_data_providers/ccxt_candles_ta_to_csv.py +227 -0
- siglab_py-0.1.25/siglab_py/market_data_providers/futu_candles.py → siglab_py-0.1.26/siglab_py/market_data_providers/futu_candles_ta_to_csv.py +34 -13
- {siglab_py-0.1.25 → siglab_py-0.1.26}/siglab_py/tests/integration/market_data_util_tests.py +5 -4
- {siglab_py-0.1.25 → siglab_py-0.1.26}/siglab_py/tests/unit/analytic_util_tests.py +1 -1
- siglab_py-0.1.26/siglab_py/tests/unit/market_data_util_tests.py +52 -0
- {siglab_py-0.1.25 → siglab_py-0.1.26}/siglab_py/util/analytic_util.py +7 -0
- {siglab_py-0.1.25 → siglab_py-0.1.26}/siglab_py/util/market_data_util.py +60 -0
- {siglab_py-0.1.25 → siglab_py-0.1.26}/siglab_py.egg-info/PKG-INFO +1 -1
- {siglab_py-0.1.25 → siglab_py-0.1.26}/siglab_py.egg-info/SOURCES.txt +3 -1
- {siglab_py-0.1.25 → siglab_py-0.1.26}/siglab_py/__init__.py +0 -0
- {siglab_py-0.1.25 → siglab_py-0.1.26}/siglab_py/constants.py +0 -0
- {siglab_py-0.1.25 → siglab_py-0.1.26}/siglab_py/exchanges/__init__.py +0 -0
- {siglab_py-0.1.25 → siglab_py-0.1.26}/siglab_py/exchanges/any_exchange.py +0 -0
- {siglab_py-0.1.25 → siglab_py-0.1.26}/siglab_py/exchanges/futubull.py +0 -0
- {siglab_py-0.1.25 → siglab_py-0.1.26}/siglab_py/market_data_providers/__init__.py +0 -0
- {siglab_py-0.1.25 → siglab_py-0.1.26}/siglab_py/market_data_providers/aggregated_orderbook_provider.py +0 -0
- {siglab_py-0.1.25 → siglab_py-0.1.26}/siglab_py/market_data_providers/candles_provider.py +0 -0
- {siglab_py-0.1.25 → siglab_py-0.1.26}/siglab_py/market_data_providers/candles_ta_provider.py +0 -0
- {siglab_py-0.1.25 → siglab_py-0.1.26}/siglab_py/market_data_providers/deribit_options_expiry_provider.py +0 -0
- {siglab_py-0.1.25 → siglab_py-0.1.26}/siglab_py/market_data_providers/orderbooks_provider.py +0 -0
- {siglab_py-0.1.25 → siglab_py-0.1.26}/siglab_py/market_data_providers/test_provider.py +0 -0
- {siglab_py-0.1.25 → siglab_py-0.1.26}/siglab_py/ordergateway/__init__.py +0 -0
- {siglab_py-0.1.25 → siglab_py-0.1.26}/siglab_py/ordergateway/client.py +0 -0
- {siglab_py-0.1.25 → siglab_py-0.1.26}/siglab_py/ordergateway/encrypt_keys_util.py +0 -0
- {siglab_py-0.1.25 → siglab_py-0.1.26}/siglab_py/ordergateway/gateway.py +0 -0
- {siglab_py-0.1.25 → siglab_py-0.1.26}/siglab_py/ordergateway/test_ordergateway.py +0 -0
- {siglab_py-0.1.25 → siglab_py-0.1.26}/siglab_py/tests/__init__.py +0 -0
- {siglab_py-0.1.25 → siglab_py-0.1.26}/siglab_py/tests/integration/__init__.py +0 -0
- {siglab_py-0.1.25 → siglab_py-0.1.26}/siglab_py/tests/unit/__init__.py +0 -0
- {siglab_py-0.1.25 → siglab_py-0.1.26}/siglab_py/util/__init__.py +0 -0
- {siglab_py-0.1.25 → siglab_py-0.1.26}/siglab_py/util/aws_util.py +0 -0
- {siglab_py-0.1.25 → siglab_py-0.1.26}/siglab_py/util/retry_util.py +0 -0
- {siglab_py-0.1.25 → siglab_py-0.1.26}/siglab_py.egg-info/dependency_links.txt +0 -0
- {siglab_py-0.1.25 → siglab_py-0.1.26}/siglab_py.egg-info/requires.txt +0 -0
- {siglab_py-0.1.25 → siglab_py-0.1.26}/siglab_py.egg-info/top_level.txt +0 -0
{siglab_py-0.1.25 → siglab_py-0.1.26}/pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "siglab_py"
-version = "0.1.25"
+version = "0.1.26"
 description = "Market data fetches, TA calculations and generic order gateway."
 authors = [{name = "r0bbarh00d", email = "r0bbarh00d@gmail.com"}]
 license = {text = "MIT"}
siglab_py-0.1.26/siglab_py/market_data_providers/ccxt_candles_ta_to_csv.py (new file)
@@ -0,0 +1,227 @@
+from ctypes import ArgumentError
+import sys
+import logging
+import argparse
+from datetime import datetime, timedelta
+import time
+from typing import Dict, Union, Any
+from enum import Enum
+import asyncio
+import pandas as pd
+
+from ccxt.base.exchange import Exchange as CCXTExchange
+from ccxt.binance import binance
+from ccxt.bybit import bybit
+from ccxt.okx import okx
+from ccxt.deribit import deribit
+from ccxt.kraken import kraken
+from ccxt.hyperliquid import hyperliquid
+
+from siglab_py.exchanges.futubull import Futubull
+from siglab_py.util.market_data_util import fetch_candles
+# from util.market_data_util import fetch_candles # For debug only
+from siglab_py.util.analytic_util import compute_candles_stats
+# from util.analytic_util import compute_candles_stats # For debug only
+
+'''
+Usage:
+    set PYTHONPATH=%PYTHONPATH%;D:\dev\siglab\siglab_py
+    python ccxt_candles_ta_to_csv.py --exchange_name bybit --symbol BTC/USDT:USDT --end_date "2025-03-11 0:0:0" --start_date "2024-03-11 0:0:0" --default_type linear --compute_ta Y --pypy_compatible N
+
+    (Remember: python -mpip install siglab_py)
+
+This script is pypy compatible. Set "pypy_compatible" to True, in which case "compute_candles_stats" will skip calculation for TAs which requires: scipy, statsmodels, scikit-learn, sklearn.preprocessing
+    pypy ccxt_candles_ta_to_csv.py --exchange_name bybit --symbol BTC/USDT:USDT --end_date "2025-03-11 0:0:0" --start_date "2024-03-11 0:0:0" --default_type linear --compute_ta Y --pypy_compatible Y
+
+    (Remember: pypy -mpip install siglab_py)
+
+If debugging from VSCode, launch.json:
+
+    {
+        "version": "0.2.0",
+        "configurations": [
+            {
+                "name": "Python Debugger: Current File",
+                "type": "debugpy",
+                "request": "launch",
+                "program": "${file}",
+                "console": "integratedTerminal",
+                "args" : [
+                    "--exchange_name", "bybit",
+                    "--symbol", "BTC/USDT:USDT",
+                    "--end_date", "2025-03-11 0:0:0",
+                    "--start_date", "2024-03-11 0:0:0",
+                    "--default_type", "linear",
+                    "--compute_ta", "Y",
+                    "--pypy_compatible", "N"
+                ],
+                "env": {
+                    "PYTHONPATH": "${workspaceFolder}"
+                }
+            }
+        ]
+    }
+'''
+end_date : datetime = datetime.today()
+end_date = datetime(end_date.year, end_date.month, end_date.day)
+start_date : datetime = end_date - timedelta(days=365)
+
+param : Dict = {
+    'exchange' : 'bybit',
+    'symbol' : None,
+    'start_date' : start_date,
+    'end_date' : end_date,
+    'exchange_params' : {
+        'rateLimit' : 100, # in ms
+        'options' : {
+            'defaultType' : "linear"
+        }
+    },
+    'output_filename' : 'candles_ta_$SYMBOL$.csv'
+}
+
+class LogLevel(Enum):
+    CRITICAL = 50
+    ERROR = 40
+    WARNING = 30
+    INFO = 20
+    DEBUG = 10
+    NOTSET = 0
+
+logging.Formatter.converter = time.gmtime
+logger = logging.getLogger()
+log_level = logging.INFO # DEBUG --> INFO --> WARNING --> ERROR
+logger.setLevel(log_level)
+format_str = '%(asctime)s %(message)s'
+formatter = logging.Formatter(format_str)
+sh = logging.StreamHandler()
+sh.setLevel(log_level)
+sh.setFormatter(formatter)
+logger.addHandler(sh)
+
+def log(message : str, log_level : LogLevel = LogLevel.INFO):
+    if log_level.value<LogLevel.WARNING.value:
+        logger.info(f"{datetime.now()} {message}")
+
+    elif log_level.value==LogLevel.WARNING.value:
+        logger.warning(f"{datetime.now()} {message}")
+
+    elif log_level.value==LogLevel.ERROR.value:
+        logger.error(f"{datetime.now()} {message}")
+
+def parse_args():
+    parser = argparse.ArgumentParser() # type: ignore
+    parser.add_argument("--exchange_name", help="Exchange name. bybit, okx, bybit, deribit, hyperliquid ...etc, add whatever you want top of script, import them. Then add to instantiate_exchange.", default="bybit")
+    parser.add_argument("--symbol", help="symbol, CEX example BTC/USDT for spot. BTC/USDT:USDT for perpetuals. Many DEXes offer USDC pairs.", default="BTC/USDT:USDT")
+    parser.add_argument("--start_date", help="Format: %Y-%m-%d %H:%M:%S", default=None)
+    parser.add_argument("--end_date", help="Format: %Y-%m-%d %H:%M:%S", default=None)
+
+    '''
+    Enums here:
+        https://openapi.futunn.com/futu-api-doc/en/quote/quote.html#66
+        https://openapi.futunn.com/futu-api-doc/en/trade/trade.html#9434
+    '''
+    parser.add_argument("--default_type", help="Depends on exchange. Very often, spot, linear/swap for perpetuals. Have a look at gateway.py instantiate_exchange https://github.com/r0bbar/siglab/blob/master/siglab_py/ordergateway/gateway.py", default="linear")
+
+    parser.add_argument("--compute_ta", help="Compute technical indicators?. Y or N (default).", default='N')
+    parser.add_argument("--candle_size", help="candle interval: 1m, 1h, 1d... etc", default='1h')
+    parser.add_argument("--ma_long_intervals", help="Sliding Window size in number of intervals for higher timeframe", default=24)
+    parser.add_argument("--ma_short_intervals", help="Sliding Window size in number of intervals for lower timeframe", default=8)
+    parser.add_argument("--boillenger_std_multiples", help="Boillenger bands: # std", default=2)
+
+    parser.add_argument("--pypy_compatible", help="pypy_compatible: If Y, analytic_util will import statsmodels.api (slopes and divergence calc). In any case, partition_sliding_window requires scipy.stats.linregress and cannot be used with pypy. Y or N (default).", default='N')
+
+    args = parser.parse_args()
+    param['exchange_name'] = args.exchange_name.strip().lower()
+    param['symbol'] = args.symbol.strip().upper()
+
+    param['start_date'] = datetime.strptime(args.start_date, "%Y-%m-%d %H:%M:%S") if args.start_date else start_date
+    param['end_date'] = datetime.strptime(args.end_date, "%Y-%m-%d %H:%M:%S") if args.end_date else end_date
+
+    param['exchange_params']['options']['defaultType'] = args.default_type
+
+    param['output_filename'] = param['output_filename'].replace('$SYMBOL$', param['symbol'].replace(":",".").replace("/","."))
+
+    if args.compute_ta:
+        if args.compute_ta=='Y':
+            param['compute_ta'] = True
+        else:
+            param['compute_ta'] = False
+    else:
+        param['compute_ta'] = False
+    param['candle_size'] = args.candle_size
+    param['ma_long_intervals'] = int(args.ma_long_intervals)
+    param['ma_short_intervals'] = int(args.ma_short_intervals)
+    param['boillenger_std_multiples'] = int(args.boillenger_std_multiples)
+
+    if args.pypy_compatible:
+        if args.pypy_compatible=='Y':
+            param['pypy_compatible'] = True
+        else:
+            param['pypy_compatible'] = False
+    else:
+        param['pypy_compatible'] = False
+
+def instantiate_exchange(
+    exchange_name : str,
+    exchange_params : Dict[str, Any]
+) -> CCXTExchange:
+    if exchange_name=='binance':
+        return binance(exchange_params)
+    elif exchange_name=='bybit':
+        return bybit(exchange_params)
+    elif exchange_name=='okx':
+        return okx(exchange_params)
+    elif exchange_name=='deribit':
+        return deribit(exchange_params)
+    else:
+        raise ArgumentError(f"Unsupported exchange {exchange_name}. Please import top of script and add to instantiate_exchange.")
+
+async def main():
+    parse_args()
+
+    fh = logging.FileHandler(f"ccxt_candles_ta_to_csv.log")
+    fh.setLevel(log_level)
+    fh.setFormatter(formatter)
+    logger.addHandler(fh) # type: ignore
+
+    exchange = instantiate_exchange(param['exchange_name'], param['exchange_params'])
+    markets = exchange.load_markets()
+    if param['symbol'] not in markets:
+        raise ArgumentError(f"{param['symbol']} not support by {param['exchange_name']}")
+
+    pd_candles: Union[pd.DataFrame, None] = fetch_candles(
+        start_ts=int(param['start_date'].timestamp()),
+        end_ts=int(param['end_date'].timestamp()),
+        exchange=exchange,
+        normalized_symbols=[ param['symbol'] ],
+        candle_size=param['candle_size']
+    )[param['symbol']]
+
+    assert pd_candles is not None
+
+    if pd_candles is not None:
+        assert len(pd_candles) > 0, "No candles returned."
+        expected_columns = {'exchange', 'symbol', 'timestamp_ms', 'open', 'high', 'low', 'close', 'volume', 'datetime_utc', 'datetime', 'year', 'month', 'day', 'hour', 'minute', 'week_of_month', 'apac_trading_hr', 'emea_trading_hr', 'amer_trading_hr'}
+        assert set(pd_candles.columns) >= expected_columns, "Missing expected columns."
+        assert pd_candles['timestamp_ms'].notna().all(), "timestamp_ms column contains NaN values."
+        assert pd_candles['timestamp_ms'].is_monotonic_increasing, "Timestamps are not in ascending order."
+
+        if param['compute_ta']:
+            start = time.time()
+            compute_candles_stats(
+                pd_candles=pd_candles,
+                boillenger_std_multiples=param['boillenger_std_multiples'],
+                sliding_window_how_many_candles=param['ma_long_intervals'],
+                slow_fast_interval_ratio=(param['ma_long_intervals']/param['ma_short_intervals']),
+                pypy_compat=param['pypy_compatible']
+            )
+            compute_candles_stats_elapsed_ms = int((time.time() - start) *1000)
+            log(f"TA calculated, took {compute_candles_stats_elapsed_ms} ms")
+
+        log(f"Candles (# rows: {pd_candles.shape[0]}) written to {param['output_filename']}")
+        pd_candles.to_csv(param['output_filename'])
+
+    sys.exit()
+
+asyncio.run(main())
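The new script is driven from the command line, but the two calls it wires together can also be used directly. A minimal sketch follows (illustrative only, not part of the release; the bybit settings, symbol, look-back window and window sizes are assumptions), using only the parameter names visible in the hunk above:

from datetime import datetime, timedelta
from ccxt.bybit import bybit
from siglab_py.util.market_data_util import fetch_candles
from siglab_py.util.analytic_util import compute_candles_stats

end_date = datetime.today()
start_date = end_date - timedelta(days=30)

# Same exchange_params shape as the script's param dict.
exchange = bybit({'rateLimit': 100, 'options': {'defaultType': 'linear'}})
markets = exchange.load_markets()

# fetch_candles returns a dict keyed by symbol, as the script's [param['symbol']] lookup shows.
pd_candles = fetch_candles(
    start_ts=int(start_date.timestamp()),
    end_ts=int(end_date.timestamp()),
    exchange=exchange,
    normalized_symbols=['BTC/USDT:USDT'],
    candle_size='1h'
)['BTC/USDT:USDT']

# The script calls compute_candles_stats for its side effect of adding TA columns
# to pd_candles, then writes the frame out.
compute_candles_stats(
    pd_candles=pd_candles,
    boillenger_std_multiples=2,
    sliding_window_how_many_candles=24,   # ma_long_intervals
    slow_fast_interval_ratio=24 / 8,      # ma_long_intervals / ma_short_intervals
    pypy_compat=False
)
pd_candles.to_csv('candles_ta_BTC.USDT.USDT.csv')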
siglab_py-0.1.25/siglab_py/market_data_providers/futu_candles.py → siglab_py-0.1.26/siglab_py/market_data_providers/futu_candles_ta_to_csv.py (renamed, modified)
@@ -1,10 +1,12 @@
 import sys
 import logging
 import argparse
-from datetime import datetime
+from datetime import datetime, timedelta
+import time
 from typing import Dict, Union
 from enum import Enum
 import asyncio
+import pandas as pd
 
 from futu import *
 
@@ -14,9 +16,17 @@ from siglab_py.util.analytic_util import compute_candles_stats
 
 '''
 Usage:
-
+    set PYTHONPATH=%PYTHONPATH%;D:\dev\siglab\siglab_py
+    python futu_candles_ta_to_csv.py --symbol HK.00700 --end_date "2025-03-11 0:0:0" --start_date "2024-03-11 0:0:0" --market HK --trdmarket HK --security_firm FUTUSECURITIES --security_type STOCK --compute_ta Y --pypy_compatible N
 
-
+    python futu_candles_ta_to_csv.py --symbol AAPL --end_date "2025-03-11 0:0:0" --start_date "2024-03-11 0:0:0" --market US --trdmarket US --security_firm FUTUSECURITIES --security_type STOCK --compute_ta Y --pypy_compatible N
+
+    (Remember: python -mpip install siglab_py)
+
+This script is pypy compatible. Set "pypy_compatible" to True, in which case "compute_candles_stats" will skip calculation for TAs which requires: scipy, statsmodels, scikit-learn, sklearn.preprocessing
+    pypy futu_candles_ta_to_csv.py --symbol HK.00700 --end_date "2025-03-11 0:0:0" --start_date "2024-03-11 0:0:0" --market HK --trdmarket HK --security_firm FUTUSECURITIES --security_type STOCK --compute_ta Y --pypy_compatible Y
+
+    (Remember: pypy -mpip install siglab_py)
 
 If debugging from VSCode, launch.json:
 
@@ -37,7 +47,8 @@ If debugging from VSCode, launch.json:
                     "--trdmarket", "HK",
                     "--security_firm", "FUTUSECURITIES",
                     "--security_type", "STOCK",
-                    "--compute_ta", "Y"
+                    "--compute_ta", "Y",
+                    "--pypy_compatible", "N"
                 ],
                 "env": {
                     "PYTHONPATH": "${workspaceFolder}"
@@ -62,7 +73,7 @@ param : Dict = {
        'host' : '127.0.0.1',
        'port' : 11111
    },
-    'output_filename' : '
+    'output_filename' : 'candles_ta_$SYMBOL$.csv'
 }
 
 class LogLevel(Enum):
@@ -112,10 +123,12 @@ def parse_args():
 
     parser.add_argument("--compute_ta", help="Compute technical indicators?. Y or N (default).", default='N')
     parser.add_argument("--candle_size", help="candle interval: 1m, 1h, 1d... etc", default='1h')
-    parser.add_argument("--ma_long_intervals", help="Window size in number of intervals for higher timeframe", default=24)
-    parser.add_argument("--ma_short_intervals", help="Window size in number of intervals for lower timeframe", default=8)
+    parser.add_argument("--ma_long_intervals", help="Sliding Window size in number of intervals for higher timeframe", default=24)
+    parser.add_argument("--ma_short_intervals", help="Sliding Window size in number of intervals for lower timeframe", default=8)
     parser.add_argument("--boillenger_std_multiples", help="Boillenger bands: # std", default=2)
 
+    parser.add_argument("--pypy_compatible", help="pypy_compatible: If Y, analytic_util will import statsmodels.api (slopes and divergence calc). In any case, partition_sliding_window requires scipy.stats.linregress and cannot be used with pypy. Y or N (default).", default='N')
+
     args = parser.parse_args()
    param['symbol'] = args.symbol.strip().upper()
 
@@ -140,10 +153,18 @@ def parse_args():
     param['ma_short_intervals'] = int(args.ma_short_intervals)
     param['boillenger_std_multiples'] = int(args.boillenger_std_multiples)
 
+    if args.pypy_compatible:
+        if args.pypy_compatible=='Y':
+            param['pypy_compatible'] = True
+        else:
+            param['pypy_compatible'] = False
+    else:
+        param['pypy_compatible'] = False
+
 async def main():
     parse_args()
 
-    fh = logging.FileHandler(f"
+    fh = logging.FileHandler(f"futu_candles_ta_to_csv.log")
     fh.setLevel(log_level)
     fh.setFormatter(formatter)
     logger.addHandler(fh) # type: ignore
@@ -151,18 +172,18 @@ async def main():
     exchange = Futubull(param)
 
     pd_candles: Union[pd.DataFrame, None] = fetch_candles(
-        start_ts=int(start_date.timestamp()),
-        end_ts=int(end_date.timestamp()),
+        start_ts=int(param['start_date'].timestamp()),
+        end_ts=int(param['end_date'].timestamp()),
         exchange=exchange,
         normalized_symbols=[ param['symbol'] ],
-        candle_size='
+        candle_size=param['candle_size']
     )[param['symbol']]
 
     assert pd_candles is not None
 
     if pd_candles is not None:
         assert len(pd_candles) > 0, "No candles returned."
-        expected_columns = {'exchange', 'symbol', 'timestamp_ms', 'open', 'high', 'low', 'close', 'volume', 'datetime_utc', 'datetime', 'year', 'month', 'day', 'hour', 'minute'}
+        expected_columns = {'exchange', 'symbol', 'timestamp_ms', 'open', 'high', 'low', 'close', 'volume', 'datetime_utc', 'datetime', 'year', 'month', 'day', 'hour', 'minute', 'week_of_month', 'apac_trading_hr', 'emea_trading_hr', 'amer_trading_hr'}
         assert set(pd_candles.columns) >= expected_columns, "Missing expected columns."
         assert pd_candles['timestamp_ms'].notna().all(), "timestamp_ms column contains NaN values."
         assert pd_candles['timestamp_ms'].is_monotonic_increasing, "Timestamps are not in ascending order."
@@ -174,7 +195,7 @@ async def main():
                 boillenger_std_multiples=param['boillenger_std_multiples'],
                 sliding_window_how_many_candles=param['ma_long_intervals'],
                 slow_fast_interval_ratio=(param['ma_long_intervals']/param['ma_short_intervals']),
-                pypy_compat=
+                pypy_compat=param['pypy_compatible']
             )
             compute_candles_stats_elapsed_ms = int((time.time() - start) *1000)
             log(f"TA calculated, took {compute_candles_stats_elapsed_ms} ms")
{siglab_py-0.1.25 → siglab_py-0.1.26}/siglab_py/tests/integration/market_data_util_tests.py
@@ -36,7 +36,7 @@ class MarketDataUtilTests(unittest.TestCase):
 
         if pd_candles is not None:
             assert len(pd_candles) > 0, "No candles returned."
-            expected_columns = {'exchange', 'symbol', 'timestamp_ms', 'open', 'high', 'low', 'close', 'volume', 'datetime_utc', 'datetime', 'year', 'month', 'day', 'hour', 'minute'}
+            expected_columns = {'exchange', 'symbol', 'timestamp_ms', 'open', 'high', 'low', 'close', 'volume', 'datetime_utc', 'datetime', 'year', 'month', 'day', 'hour', 'minute', 'week_of_month', 'apac_trading_hr', 'emea_trading_hr', 'amer_trading_hr'}
             assert set(pd_candles.columns) >= expected_columns, "Missing expected columns."
             assert pd_candles['timestamp_ms'].notna().all(), "timestamp_ms column contains NaN values."
             assert pd_candles['timestamp_ms'].is_monotonic_increasing, "Timestamps are not in ascending order."
@@ -81,7 +81,7 @@ class MarketDataUtilTests(unittest.TestCase):
 
         if pd_candles is not None:
             assert len(pd_candles) > 0, "No candles returned."
-            expected_columns = {'exchange', 'symbol', 'timestamp_ms', 'open', 'high', 'low', 'close', 'volume', 'datetime_utc', 'datetime', 'year', 'month', 'day', 'hour', 'minute'}
+            expected_columns = {'exchange', 'symbol', 'timestamp_ms', 'open', 'high', 'low', 'close', 'volume', 'datetime_utc', 'datetime', 'year', 'month', 'day', 'hour', 'minute', 'week_of_month', 'apac_trading_hr', 'emea_trading_hr', 'amer_trading_hr'}
             assert set(pd_candles.columns) >= expected_columns, "Missing expected columns."
             assert pd_candles['timestamp_ms'].notna().all(), "timestamp_ms column contains NaN values."
             assert pd_candles['timestamp_ms'].is_monotonic_increasing, "Timestamps are not in ascending order."
@@ -114,12 +114,13 @@ class MarketDataUtilTests(unittest.TestCase):
 
         if pd_candles is not None:
             assert len(pd_candles) > 0, "No candles returned."
-            expected_columns = {'exchange', 'symbol', 'timestamp_ms', 'open', 'high', 'low', 'close', 'volume', 'datetime_utc', 'datetime', 'year', 'month', 'day', 'hour', 'minute'}
+            expected_columns = {'exchange', 'symbol', 'timestamp_ms', 'open', 'high', 'low', 'close', 'volume', 'datetime_utc', 'datetime', 'year', 'month', 'day', 'hour', 'minute', 'week_of_month', 'apac_trading_hr', 'emea_trading_hr', 'amer_trading_hr'}
             assert set(pd_candles.columns) >= expected_columns, "Missing expected columns."
             assert pd_candles['timestamp_ms'].notna().all(), "timestamp_ms column contains NaN values."
             assert pd_candles['timestamp_ms'].is_monotonic_increasing, "Timestamps are not in ascending order."
 
     def test_fetch_candles_futubull(self):
+        # You need Futu OpenD running and you need entitlements
         end_date : datetime = datetime.today()
         start_date : datetime = end_date - timedelta(days=365*3)
 
@@ -148,7 +149,7 @@ class MarketDataUtilTests(unittest.TestCase):
 
         if pd_candles is not None:
             assert len(pd_candles) > 0, "No candles returned."
-            expected_columns = {'exchange', 'symbol', 'timestamp_ms', 'open', 'high', 'low', 'close', 'volume', 'datetime_utc', 'datetime', 'year', 'month', 'day', 'hour', 'minute'}
+            expected_columns = {'exchange', 'symbol', 'timestamp_ms', 'open', 'high', 'low', 'close', 'volume', 'datetime_utc', 'datetime', 'year', 'month', 'day', 'hour', 'minute', 'week_of_month', 'apac_trading_hr', 'emea_trading_hr', 'amer_trading_hr'}
             assert set(pd_candles.columns) >= expected_columns, "Missing expected columns."
             assert pd_candles['timestamp_ms'].notna().all(), "timestamp_ms column contains NaN values."
             assert pd_candles['timestamp_ms'].is_monotonic_increasing, "Timestamps are not in ascending order."
{siglab_py-0.1.25 → siglab_py-0.1.26}/siglab_py/tests/unit/analytic_util_tests.py
@@ -41,5 +41,5 @@ class AnalyticUtilTests(unittest.TestCase):
             pypy_compat=True
         )
 
-        expected_columns : List[str] = ['exchange', 'symbol', 'timestamp_ms', 'open', 'high', 'low', 'close', 'volume', 'datetime', 'datetime_utc', 'year', 'month', 'day', 'hour', 'minute', 'dayofweek', 'pct_chg_on_close', 'candle_height', 'is_green', 'pct_change_close', 'sma_short_periods', 'sma_long_periods', 'ema_short_periods', 'ema_long_periods', 'ema_close', 'std', 'ema_volume_short_periods', 'ema_volume_long_periods', 'max_short_periods', 'max_long_periods', 'idmax_short_periods', 'idmax_long_periods', 'min_short_periods', 'min_long_periods', 'idmin_short_periods', 'idmin_long_periods', 'h_l', 'h_pc', 'l_pc', 'tr', 'atr', 'hurst_exp', 'boillenger_upper', 'boillenger_lower', 'boillenger_channel_height', 'boillenger_upper_agg', 'boillenger_lower_agg', 'boillenger_channel_height_agg', 'aggressive_up', 'aggressive_up_index', 'aggressive_up_candle_height', 'aggressive_up_candle_high', 'aggressive_up_candle_low', 'aggressive_down', 'aggressive_down_index', 'aggressive_down_candle_height', 'aggressive_down_candle_high', 'aggressive_down_candle_low', 'fvg_low', 'fvg_high', 'fvg_gap', 'fvg_mitigated', 'close_delta', 'close_delta_percent', 'up', 'down', 'rsi', 'ema_rsi', 'typical_price', 'money_flow', 'money_flow_positive', 'money_flow_negative', 'positive_flow_sum', 'negative_flow_sum', 'money_flow_ratio', 'mfi', 'macd', 'signal', 'macd_minus_signal', 'fib_618_short_periods', 'fib_618_long_periods', 'gap_close_vs_ema', 'close_above_or_below_ema', 'close_vs_ema_inflection']
+        expected_columns : List[str] = ['exchange', 'symbol', 'timestamp_ms', 'open', 'high', 'low', 'close', 'volume', 'datetime', 'datetime_utc', 'year', 'month', 'day', 'hour', 'minute', 'dayofweek', 'pct_chg_on_close', 'candle_height', 'is_green', 'pct_change_close', 'sma_short_periods', 'sma_long_periods', 'ema_short_periods', 'ema_long_periods', 'ema_close', 'std', 'candle_height_percent', 'chop_against_ema', 'ema_volume_short_periods', 'ema_volume_long_periods', 'max_short_periods', 'max_long_periods', 'idmax_short_periods', 'idmax_long_periods', 'min_short_periods', 'min_long_periods', 'idmin_short_periods', 'idmin_long_periods', 'h_l', 'h_pc', 'l_pc', 'tr', 'atr', 'hurst_exp', 'boillenger_upper', 'boillenger_lower', 'boillenger_channel_height', 'boillenger_upper_agg', 'boillenger_lower_agg', 'boillenger_channel_height_agg', 'aggressive_up', 'aggressive_up_index', 'aggressive_up_candle_height', 'aggressive_up_candle_high', 'aggressive_up_candle_low', 'aggressive_down', 'aggressive_down_index', 'aggressive_down_candle_height', 'aggressive_down_candle_high', 'aggressive_down_candle_low', 'fvg_low', 'fvg_high', 'fvg_gap', 'fvg_mitigated', 'close_delta', 'close_delta_percent', 'up', 'down', 'rsi', 'ema_rsi', 'typical_price', 'money_flow', 'money_flow_positive', 'money_flow_negative', 'positive_flow_sum', 'negative_flow_sum', 'money_flow_ratio', 'mfi', 'macd', 'signal', 'macd_minus_signal', 'fib_618_short_periods', 'fib_618_long_periods', 'gap_close_vs_ema', 'close_above_or_below_ema', 'close_vs_ema_inflection']
         assert(pd_candles.columns.to_list()==expected_columns)
siglab_py-0.1.26/siglab_py/tests/unit/market_data_util_tests.py (new file)
@@ -0,0 +1,52 @@
+import unittest
+from datetime import datetime, timedelta
+from typing import Union
+from pathlib import Path
+
+from util.market_data_util import *
+
+from futu import *
+
+
+# @unittest.skip("Skip all integration tests.")
+class MarketDataUtilTests(unittest.TestCase):
+    def test_timestamp_to_week_of_month(self):
+        test_timestamps = [
+            1672531200000, # 2023-01-01 (Week 0)
+            1673136000000, # 2023-01-08 (Week 1)
+            1673740800000, # 2023-01-15 (Week 2)
+            1674345600000, # 2023-01-22 (Week 3)
+            1674950400000, # 2023-01-29 (Week 4)
+            1675468800000, # 2023-02-01 (Week 0)
+            1676073600000, # 2023-02-08 (Week 1)
+            1676678400000, # 2023-02-15 (Week 2)
+            1677283200000, # 2023-02-22 (Week 3)
+            1677888000000, # 2023-03-01 (Week 0)
+        ]
+
+        expectations = [0, 1, 2, 3, 4, 0, 1, 2, 3, 0]
+
+        for i, ts in enumerate(test_timestamps):
+            expectation = expectations[i]
+            actual = timestamp_to_week_of_month(ts)
+            assert expectation == actual, f"Test failed for timestamp {ts}. Expected: {expectation}, Actual: {actual}"
+
+    def test_timestamp_to_active_trading_regions(self):
+        test_timestamps = [
+            1672531200000, # 2023-01-01 00:00:00 UTC (APAC)
+            1672563600000, # 2023-01-01 09:00:00 UTC (APAC, EMEA)
+            1672574400000, # 2023-01-01 12:00:00 UTC (EMEA)
+            1672588800000, # 2023-01-01 16:00:00 UTC (EMEA, AMER)
+            1672599600000, # 2023-01-01 19:00:00 UTC (AMER)
+            1672610400000, # 2023-01-01 22:00:00 UTC (APAC)
+        ]
+
+        expectations = [ ['APAC'], ['APAC', 'EMEA'], ['EMEA'], ['EMEA','AMER'], ['AMER'], ['APAC']]
+
+        i = 0
+        for ts in test_timestamps:
+            expectation = expectations[i]
+            actual = timestamp_to_active_trading_regions(ts)
+            assert(expectation==actual)
+            i+=1
+
{siglab_py-0.1.25 → siglab_py-0.1.26}/siglab_py/util/analytic_util.py
@@ -94,6 +94,13 @@ def compute_candles_stats(
     pd_candles['ema_close'] = pd_candles['ema_long_periods'] # Alias, shorter name
     pd_candles['std'] = pd_candles['close'].rolling(window=sliding_window_how_many_candles).std()
 
+    pd_candles['candle_height_percent'] = pd_candles['candle_height'] / pd_candles['ema_close'] * 100
+
+    pd_candles['chop_against_ema'] = (
+        (pd_candles['is_green'] & (pd_candles['close'] > pd_candles['ema_close'])) | # Case 1: Green candle and close > EMA
+        (~pd_candles['is_green'] & (pd_candles['close'] < pd_candles['ema_close'])) # Case 2: Red candle and close < EMA
+    )
+
     pd_candles['ema_volume_short_periods'] = pd_candles['volume'].ewm(span=sliding_window_how_many_candles/slow_fast_interval_ratio, adjust=False).mean()
     pd_candles['ema_volume_long_periods'] = pd_candles['volume'].ewm(span=sliding_window_how_many_candles, adjust=False).mean()
 
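A tiny, self-contained illustration of the two new columns, using made-up candle values (not from the package); it simply mirrors the expressions added above:

import pandas as pd

pd_candles = pd.DataFrame({
    'close':         [101.0,  99.0, 102.0,  98.0],
    'ema_close':     [100.0, 100.0, 100.0, 100.0],
    'is_green':      [ True,  True, False, False],
    'candle_height': [  2.0,   1.5,   3.0,   1.0],
})

# Candle height expressed as a percentage of the long-period EMA.
pd_candles['candle_height_percent'] = pd_candles['candle_height'] / pd_candles['ema_close'] * 100

# True when a green candle closes above the EMA, or a red candle closes below it.
pd_candles['chop_against_ema'] = (
    (pd_candles['is_green'] & (pd_candles['close'] > pd_candles['ema_close'])) |
    (~pd_candles['is_green'] & (pd_candles['close'] < pd_candles['ema_close']))
)

print(pd_candles[['close', 'is_green', 'chop_against_ema']])
# chop_against_ema evaluates to: True, False, False, True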
{siglab_py-0.1.25 → siglab_py-0.1.26}/siglab_py/util/market_data_util.py
@@ -35,6 +35,66 @@ def timestamp_to_datetime_cols(pd_candles : pd.DataFrame):
     pd_candles['minute'] = pd_candles['datetime'].dt.minute
     pd_candles['dayofweek'] = pd_candles['datetime'].dt.dayofweek # dayofweek: Monday is 0 and Sunday is 6
 
+    pd_candles['week_of_month'] = pd_candles['timestamp_ms'].apply(
+        lambda x: timestamp_to_week_of_month(x)
+    )
+
+    pd_candles['apac_trading_hr'] = pd_candles['timestamp_ms'].apply(
+        lambda x: "APAC" in timestamp_to_active_trading_regions(x)
+    )
+    pd_candles['emea_trading_hr'] = pd_candles['timestamp_ms'].apply(
+        lambda x: "EMEA" in timestamp_to_active_trading_regions(x)
+    )
+    pd_candles['amer_trading_hr'] = pd_candles['timestamp_ms'].apply(
+        lambda x: "AMER" in timestamp_to_active_trading_regions(x)
+    )
+
+def timestamp_to_active_trading_regions(
+    timestamp_ms : int
+) -> List[str]:
+
+    '''
+    APAC (Asia-Pacific) Trading Hours
+    UTC 22:00 - 09:00 (approximate range)
+    Major financial centers: Tokyo, Hong Kong, Singapore, Sydney
+
+    EMEA (Europe, Middle East, Africa) Trading Hours
+    UTC 07:00 - 16:00 (approximate range)
+    Major financial centers: London, Frankfurt, Paris, Zurich, Dubai
+
+    US Trading Hours
+    UTC 13:30 - 20:00 (approximate range)
+    Major financial centers: New York, Chicago
+    Key markets: NYSE, NASDAQ
+
+    utcnow and utcfromtimestamp been deprecated in Python 3.12
+    https://www.pythonmorsels.com/converting-to-utc-time/
+    '''
+    active_trading_regions : List[str] = []
+
+    dt_utc = datetime.fromtimestamp(int(timestamp_ms / 1000), tz=timezone.utc)
+    utc_hour = dt_utc.hour
+    if (utc_hour >= 22) or (utc_hour <= 9):
+        active_trading_regions.append("APAC")
+
+    if 7 <= utc_hour <= 16:
+        active_trading_regions.append("EMEA")
+
+    if 13 <= utc_hour <= 20:
+        active_trading_regions.append("AMER")
+
+    return active_trading_regions
+
+def timestamp_to_week_of_month(timestamp_ms: int) -> int:
+    """
+    Returns:
+        int: Week of the month (0 = first week, 1 = second week, etc.).
+    """
+    dt = datetime.fromtimestamp(timestamp_ms / 1000)
+    day_of_month = dt.day
+    week_of_month = (day_of_month - 1) // 7
+    return week_of_month
+
 def fix_column_types(pd_candles : pd.DataFrame):
     pd_candles['open'] = pd_candles['open'].astype(float)
     pd_candles['high'] = pd_candles['high'].astype(float)
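A short usage sketch of the two new helpers (the timestamp is an arbitrary example chosen for illustration; note that timestamp_to_week_of_month converts with the local clock via datetime.fromtimestamp, while timestamp_to_active_trading_regions works explicitly in UTC):

from datetime import datetime, timezone
from siglab_py.util.market_data_util import (
    timestamp_to_week_of_month,
    timestamp_to_active_trading_regions,
)

# 2023-01-17 09:00 UTC: day 17 -> (17 - 1) // 7 = week index 2,
# and 09:00 UTC falls inside both the APAC (22:00-09:00) and EMEA (07:00-16:00) windows.
ts_ms = int(datetime(2023, 1, 17, 9, 0, tzinfo=timezone.utc).timestamp() * 1000)
print(timestamp_to_week_of_month(ts_ms))           # 2
print(timestamp_to_active_trading_regions(ts_ms))  # ['APAC', 'EMEA']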
{siglab_py-0.1.25 → siglab_py-0.1.26}/siglab_py.egg-info/SOURCES.txt
@@ -14,8 +14,9 @@ siglab_py/market_data_providers/__init__.py
 siglab_py/market_data_providers/aggregated_orderbook_provider.py
 siglab_py/market_data_providers/candles_provider.py
 siglab_py/market_data_providers/candles_ta_provider.py
+siglab_py/market_data_providers/ccxt_candles_ta_to_csv.py
 siglab_py/market_data_providers/deribit_options_expiry_provider.py
-siglab_py/market_data_providers/futu_candles.py
+siglab_py/market_data_providers/futu_candles_ta_to_csv.py
 siglab_py/market_data_providers/orderbooks_provider.py
 siglab_py/market_data_providers/test_provider.py
 siglab_py/ordergateway/__init__.py
@@ -28,6 +29,7 @@ siglab_py/tests/integration/__init__.py
 siglab_py/tests/integration/market_data_util_tests.py
 siglab_py/tests/unit/__init__.py
 siglab_py/tests/unit/analytic_util_tests.py
+siglab_py/tests/unit/market_data_util_tests.py
 siglab_py/util/__init__.py
 siglab_py/util/analytic_util.py
 siglab_py/util/aws_util.py