siglab-py: 0.1.19-py3-none-any.whl → 0.6.33-py3-none-any.whl

This diff shows the contents of two publicly released versions of the package, as published to their public registry. It is provided for informational purposes only and reflects the packages as they appear in that registry.
Files changed (45)
  1. siglab_py/algo/__init__.py +0 -0
  2. siglab_py/algo/macdrsi_crosses_15m_tc_strategy.py +107 -0
  3. siglab_py/algo/strategy_base.py +122 -0
  4. siglab_py/algo/strategy_executor.py +1308 -0
  5. siglab_py/algo/tp_algo.py +529 -0
  6. siglab_py/backtests/__init__.py +0 -0
  7. siglab_py/backtests/backtest_core.py +2405 -0
  8. siglab_py/backtests/coinflip_15m_crypto.py +432 -0
  9. siglab_py/backtests/fibonacci_d_mv_crypto.py +541 -0
  10. siglab_py/backtests/macdrsi_crosses_15m_tc_crypto.py +473 -0
  11. siglab_py/constants.py +26 -1
  12. siglab_py/exchanges/binance.py +38 -0
  13. siglab_py/exchanges/deribit.py +83 -0
  14. siglab_py/exchanges/futubull.py +33 -3
  15. siglab_py/market_data_providers/candles_provider.py +11 -10
  16. siglab_py/market_data_providers/candles_ta_provider.py +5 -5
  17. siglab_py/market_data_providers/ccxt_candles_ta_to_csv.py +238 -0
  18. siglab_py/market_data_providers/futu_candles_ta_to_csv.py +224 -0
  19. siglab_py/market_data_providers/google_monitor.py +320 -0
  20. siglab_py/market_data_providers/orderbooks_provider.py +15 -12
  21. siglab_py/market_data_providers/tg_monitor.py +428 -0
  22. siglab_py/market_data_providers/{test_provider.py → trigger_provider.py} +9 -8
  23. siglab_py/ordergateway/client.py +172 -41
  24. siglab_py/ordergateway/encrypt_keys_util.py +1 -1
  25. siglab_py/ordergateway/gateway.py +456 -344
  26. siglab_py/ordergateway/test_ordergateway.py +8 -7
  27. siglab_py/tests/integration/market_data_util_tests.py +80 -6
  28. siglab_py/tests/unit/analytic_util_tests.py +67 -4
  29. siglab_py/tests/unit/market_data_util_tests.py +96 -0
  30. siglab_py/tests/unit/simple_math_tests.py +252 -0
  31. siglab_py/tests/unit/trading_util_tests.py +65 -0
  32. siglab_py/util/analytic_util.py +484 -66
  33. siglab_py/util/datetime_util.py +39 -0
  34. siglab_py/util/market_data_util.py +564 -74
  35. siglab_py/util/module_util.py +40 -0
  36. siglab_py/util/notification_util.py +78 -0
  37. siglab_py/util/retry_util.py +16 -3
  38. siglab_py/util/simple_math.py +262 -0
  39. siglab_py/util/slack_notification_util.py +59 -0
  40. siglab_py/util/trading_util.py +118 -0
  41. {siglab_py-0.1.19.dist-info → siglab_py-0.6.33.dist-info}/METADATA +5 -13
  42. siglab_py-0.6.33.dist-info/RECORD +56 -0
  43. {siglab_py-0.1.19.dist-info → siglab_py-0.6.33.dist-info}/WHEEL +1 -1
  44. siglab_py-0.1.19.dist-info/RECORD +0 -31
  45. {siglab_py-0.1.19.dist-info → siglab_py-0.6.33.dist-info}/top_level.txt +0 -0
siglab_py/market_data_providers/candles_provider.py
@@ -21,7 +21,7 @@ from ccxt.okx import okx
  from ccxt.bybit import bybit
  from ccxt.base.exchange import Exchange

- from util.market_data_util import fetch_candles
+ from siglab_py.util.market_data_util import fetch_candles


  '''
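Note: from 0.6.x the providers import from the installed package namespace (siglab_py.util.*) instead of the repo-local util.* path. A minimal sketch of calling fetch_candles through the packaged path, assuming siglab_py is installed and the bybit linear market is reachable; the argument names follow the call sites shown later in this diff:

    from datetime import datetime, timedelta
    from ccxt.bybit import bybit
    from siglab_py.util.market_data_util import fetch_candles  # packaged path, not util.market_data_util

    exchange = bybit({'options': {'defaultType': 'linear'}})
    end_ts = int(datetime.now().timestamp())
    start_ts = int((datetime.now() - timedelta(days=7)).timestamp())

    # Returns a dict keyed by symbol; each value is a pandas DataFrame (or None).
    candles = fetch_candles(
        start_ts=start_ts,
        end_ts=end_ts,
        exchange=exchange,
        normalized_symbols=['BTC/USDT:USDT'],
        candle_size='1h'
    )
    print(candles['BTC/USDT:USDT'].tail())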
@@ -33,7 +33,7 @@ This script is pypy compatible:
  pypy candles_provider.py --provider_id aaa --candle_size 1h --how_many_candles 2169 --redis_ttl_ms 3600000

  Key parameters you may want to modify:
- provider_id: You can trigger this provider instance using test_provider.py. Of course, you'd write your own.
+ provider_id: You can trigger this provider instance using trigger_provider.py. Of course, you'd write your own.
  candle_size: 1m, 5m, 15min, 1h, 1d for example.
  how_many_candles: default to 2169 (24 x 90).
  redis_ttl_ms: This is how long orderbook snapshot will last on redis when provider publishes to it.
@@ -124,9 +124,9 @@ bybit_exchange = bybit({
  })

  exchanges = {
- f"binance_{market_type}" : binance_exchange,
- f"okx_{market_type}" : okx_exchange,
- f"bybit_{market_type}" : bybit_exchange
+ f"binance" : binance_exchange,
+ f"okx" : okx_exchange,
+ f"bybit" : bybit_exchange
  }

  def log(message : str, log_level : LogLevel = LogLevel.INFO):
@@ -209,6 +209,7 @@ def process_universe(
  last_fetch_ts = last_fetch.iloc[-1]['timestamp_ms']/1000 # type: ignore Otherwise, Error: Cannot access attribute "iloc" for class "None"
  candle_size = param['candle_size']
  interval = candle_size[-1]
+ num_intervals_per_candle = int(candle_size.replace(interval,""))
  number_intervals = param['how_many_candles']

  start_date : datetime = datetime.now()
@@ -216,7 +217,7 @@
  if interval=="m":
  end_date = datetime.now()
  end_date = datetime(end_date.year, end_date.month, end_date.day, end_date.hour, end_date.minute, 0)
- start_date = end_date + timedelta(minutes=-number_intervals)
+ start_date = end_date + timedelta(minutes=-num_intervals_per_candle*number_intervals)

  num_sec_since_last_fetch = (end_date.timestamp() - last_fetch_ts) if last_fetch_ts else sys.maxsize
  fetch_again = True if num_sec_since_last_fetch >= 60 / 10 else False
@@ -224,7 +225,7 @@
  elif interval=="h":
  end_date = datetime.now()
  end_date = datetime(end_date.year, end_date.month, end_date.day, end_date.hour, 0, 0)
- start_date = end_date + timedelta(hours=-number_intervals)
+ start_date = end_date + timedelta(hours=-num_intervals_per_candle*number_intervals)

  num_sec_since_last_fetch = (end_date.timestamp() - last_fetch_ts) if last_fetch_ts else sys.maxsize
  fetch_again = True if num_sec_since_last_fetch >= 60*60 / 10 else False
@@ -232,7 +233,7 @@
  elif interval=="d":
  end_date = datetime.now()
  end_date = datetime(end_date.year, end_date.month, end_date.day, 0, 0, 0)
- start_date = end_date + timedelta(days=-number_intervals)
+ start_date = end_date + timedelta(days=-num_intervals_per_candle*number_intervals)

  num_sec_since_last_fetch = (end_date.timestamp() - last_fetch_ts) if last_fetch_ts else sys.maxsize
  fetch_again = True if num_sec_since_last_fetch >= 24*60*60 / 10 else False
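Note: the new num_intervals_per_candle factor corrects the lookback window for multi-unit candle sizes. A worked example mirroring the code in the hunks above, using fifteen-minute candles and the default how_many_candles of 2169 from the docstring; the numbers are illustrative only:

    candle_size = "15m"
    number_intervals = 2169                                             # how_many_candles
    interval = candle_size[-1]                                          # "m"
    num_intervals_per_candle = int(candle_size.replace(interval, ""))   # 15

    # Before: timedelta(minutes=-2169)      -> only ~1.5 days of history, far fewer than 2169 fifteen-minute candles
    # After:  timedelta(minutes=-15 * 2169) -> 2169 fifteen-minute candles (~22.6 days of history)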
@@ -284,7 +285,7 @@ def process_universe(

  redis_set_elapsed_ms = int((time.time() - start) *1000)

- log(f"published candles {this_row_header} {publish_key} {sys.getsizeof(data, -1)} bytes to mds elapsed {redis_set_elapsed_ms} ms")
+ log(f"published candles {candles[ticker].shape[0]} rows. {this_row_header} {publish_key} {sys.getsizeof(data, -1)} bytes to mds elapsed {redis_set_elapsed_ms} ms")

  except Exception as loop_error:
  log(f"Failed to process {this_row_header}. Error: {loop_error} {str(sys.exc_info()[0])} {str(sys.exc_info()[1])} {traceback.format_exc()}")
@@ -318,7 +319,7 @@ async def main():
  self.universe_reload_id = universe_reload_id
  task = None

- log(f"candles_provider {param['provider_id']} started, waiting for trigger. (Can use test_provider.py to trigger it)")
+ log(f"candles_provider {param['provider_id']} started, waiting for trigger. (Can use trigger_provider.py to trigger it)")

  universe_reload_id = 1
  for message in redis_pubsub.listen():
siglab_py/market_data_providers/candles_ta_provider.py
@@ -17,7 +17,7 @@ import pandas as pd
  import numpy as np
  from redis import StrictRedis

- from util.analytic_util import compute_candles_stats
+ from siglab_py.util.analytic_util import compute_candles_stats

  '''
  candles_provider.py will feed candles to redis.
@@ -219,18 +219,18 @@ def work(
  https://redis.io/commands/set/
  '''
  expiry_sec : int = 0
- if candle_size=="m":
+ if candle_size[-1]=="m":
  expiry_sec = 60 + 60*15
- elif candle_size=="h":
+ elif candle_size[-1]=="h":
  expiry_sec = 60*60 + 60*15
- elif candle_size=="d":
+ elif candle_size[-1]=="d":
  expiry_sec = 60*60*24
  expiry_sec += 60*15 # additional 15min

  redis_client.set(name=publish_key, value=json.dumps(data).encode('utf-8'), ex=expiry_sec)
  redis_set_elapsed_ms = int((time.time() - start) *1000)

- log(f"published candles {publish_key} {sys.getsizeof(data, -1)} bytes to mds elapsed {redis_set_elapsed_ms} ms, compute_candles_stats_elapsed_ms: {compute_candles_stats_elapsed_ms}")
+ log(f"published candles {pd_candles.shape[0]} rows. {publish_key} {sys.getsizeof(data, -1)} bytes to mds elapsed {redis_set_elapsed_ms} ms, compute_candles_stats_elapsed_ms: {compute_candles_stats_elapsed_ms}")
  else:
  log(f"{s_key} message with hash {message_hash} been processed previously.")
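Note: the Redis TTL fix keys off the interval suffix rather than the whole candle_size string. An illustrative before/after, assuming candle_size values like "1h" as documented in these providers:

    candle_size = "1h"
    # Before: candle_size == "h"      -> False for every branch, so expiry_sec was never set from the interval
    # After:  candle_size[-1] == "h"  -> True, so expiry_sec = 60*60 + 60*15  (one hour plus a 15-minute buffer)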
siglab_py/market_data_providers/ccxt_candles_ta_to_csv.py (new file)
@@ -0,0 +1,238 @@
+ from ctypes import ArgumentError
+ import sys
+ import logging
+ import argparse
+ from datetime import datetime, timedelta
+ import time
+ from typing import Dict, Union, Any
+ from enum import Enum
+ import asyncio
+ import pandas as pd
+
+ from ccxt.base.exchange import Exchange as CCXTExchange
+ from ccxt.binance import binance
+ from ccxt.bybit import bybit
+ from ccxt.okx import okx
+ from ccxt.deribit import deribit
+ from ccxt.kraken import kraken
+ from ccxt.hyperliquid import hyperliquid
+
+ from siglab_py.exchanges.futubull import Futubull
+ from siglab_py.util.market_data_util import fetch_candles
+ # from util.market_data_util import fetch_candles # For debug only
+ from siglab_py.util.analytic_util import compute_candles_stats
+ # from util.analytic_util import compute_candles_stats # For debug only
+
+ '''
+ Usage:
+ set PYTHONPATH=%PYTHONPATH%;D:\dev\siglab\siglab_py
+ python ccxt_candles_ta_to_csv.py --exchange_name okx --symbol BTC/USDT:USDT --candle_size 1h --end_date "2025-04-22 0:0:0" --start_date "2024-01-01 0:0:0" --default_type linear --compute_ta Y --pypy_compatible N
+
+ (Remember: python -mpip install siglab_py)
+
+ This script is pypy compatible. Set "pypy_compatible" to True, in which case "compute_candles_stats" will skip calculation for TAs which requires: scipy, statsmodels, scikit-learn, sklearn.preprocessing
+ pypy ccxt_candles_ta_to_csv.py --exchange_name bybit --symbol BTC/USDT:USDT --end_date "2025-03-11 0:0:0" --start_date "2024-03-11 0:0:0" --default_type linear --compute_ta Y --pypy_compatible Y
+
+ (Remember: pypy -mpip install siglab_py)
+
+ Other arguments:
+ candle_size: default 1h (Hourly candles). You can specify 1d, 1m ...etc
+ ma_long_intervals (default 24), ma_short_intervals (default 8):
+ analytic_util.compute_candles_stats employ sliding windows to calculate things like std (Standard Deviation), EMA/SMAs, and actually most other technical indicators.
+ compute_candles_stats calculate certain things, for example EMA, in two levels: 'long' vs 'short'
+ 'long' refers to 'higher timeframe' - this uses a bigger sliding window specified by 'ma_long_intervals'
+ 'short' refers to 'lower timeframes' - this uses a smaller sliding window specified by 'ma_short_intervals'
+
+ compute_ta: Whether you wish to compute technical indicators? Y or N (Default)
+ pypy_compatible: Some technical indicators requires python libraries that's not pypy compatible, such as statsmodels.api (slopes and divergence calc) and scipy.stats.linregress. Set to Y, then analytic_util.compute_candles_stats will skip calculations which requires these libraries.
+
+ If debugging from VSCode, launch.json:
+
+ {
+ "version": "0.2.0",
+ "configurations": [
+ {
+ "name": "Python Debugger: Current File",
+ "type": "debugpy",
+ "request": "launch",
+ "program": "${file}",
+ "console": "integratedTerminal",
+ "args" : [
+ "--exchange_name", "bybit",
+ "--symbol", "BTC/USDT:USDT",
+ "--end_date", "2025-04-22 0:0:0",
+ "--start_date", "2024-01-01 0:0:0",
+ "--default_type", "linear",
+ "--compute_ta", "Y",
+ "--pypy_compatible", "N"
+ ],
+ "env": {
+ "PYTHONPATH": "${workspaceFolder}"
+ }
+ }
+ ]
+ }
+ '''
+ end_date : datetime = datetime.today()
+ end_date = datetime(end_date.year, end_date.month, end_date.day)
+ start_date : datetime = end_date - timedelta(days=365)
+
+ param : Dict = {
+ 'exchange' : 'bybit',
+ 'symbol' : None,
+ 'start_date' : start_date,
+ 'end_date' : end_date,
+ 'exchange_params' : {
+ 'rateLimit' : 100, # in ms
+ 'options' : {
+ 'defaultType' : "linear"
+ }
+ },
+ 'output_filename' : 'candles_ta_$SYMBOL$.csv'
+ }
+
+ class LogLevel(Enum):
+ CRITICAL = 50
+ ERROR = 40
+ WARNING = 30
+ INFO = 20
+ DEBUG = 10
+ NOTSET = 0
+
+ logging.Formatter.converter = time.gmtime
+ logger = logging.getLogger()
+ log_level = logging.INFO # DEBUG --> INFO --> WARNING --> ERROR
+ logger.setLevel(log_level)
+ format_str = '%(asctime)s %(message)s'
+ formatter = logging.Formatter(format_str)
+ sh = logging.StreamHandler()
+ sh.setLevel(log_level)
+ sh.setFormatter(formatter)
+ logger.addHandler(sh)
+
+ def log(message : str, log_level : LogLevel = LogLevel.INFO):
+ if log_level.value<LogLevel.WARNING.value:
+ logger.info(f"{datetime.now()} {message}")
+
+ elif log_level.value==LogLevel.WARNING.value:
+ logger.warning(f"{datetime.now()} {message}")
+
+ elif log_level.value==LogLevel.ERROR.value:
+ logger.error(f"{datetime.now()} {message}")
+
+ def parse_args():
+ parser = argparse.ArgumentParser() # type: ignore
+ parser.add_argument("--exchange_name", help="Exchange name. bybit, okx, bybit, deribit, hyperliquid ...etc, add whatever you want top of script, import them. Then add to instantiate_exchange.", default="bybit")
+ parser.add_argument("--symbol", help="symbol, CEX example BTC/USDT for spot. BTC/USDT:USDT for perpetuals. Many DEXes offer USDC pairs.", default="BTC/USDT:USDT")
+ parser.add_argument("--start_date", help="Format: %Y-%m-%d %H:%M:%S", default=None)
+ parser.add_argument("--end_date", help="Format: %Y-%m-%d %H:%M:%S", default=None)
+
+ '''
+ Enums here:
+ https://openapi.futunn.com/futu-api-doc/en/quote/quote.html#66
+ https://openapi.futunn.com/futu-api-doc/en/trade/trade.html#9434
+ '''
+ parser.add_argument("--default_type", help="Depends on exchange. Very often, spot, linear/swap for perpetuals. Have a look at gateway.py instantiate_exchange https://github.com/r0bbar/siglab/blob/master/siglab_py/ordergateway/gateway.py", default="linear")
+
+ parser.add_argument("--compute_ta", help="Compute technical indicators?. Y or N (default).", default='N')
+ parser.add_argument("--candle_size", help="candle interval: 1m, 1h, 1d... etc", default='1h')
+ parser.add_argument("--ma_long_intervals", help="Sliding Window size in number of intervals for higher timeframe", default=24)
+ parser.add_argument("--ma_short_intervals", help="Sliding Window size in number of intervals for lower timeframe", default=8)
+ parser.add_argument("--boillenger_std_multiples", help="Boillenger bands: # std", default=2)
+
+ parser.add_argument("--pypy_compatible", help="pypy_compatible: If Y, analytic_util will import statsmodels.api (slopes and divergence calc). In any case, partition_sliding_window requires scipy.stats.linregress and cannot be used with pypy. Y or N (default).", default='N')
+
+ args = parser.parse_args()
+ param['exchange_name'] = args.exchange_name.strip().lower()
+ param['symbol'] = args.symbol.strip().upper()
+
+ param['start_date'] = datetime.strptime(args.start_date, "%Y-%m-%d %H:%M:%S") if args.start_date else start_date
+ param['end_date'] = datetime.strptime(args.end_date, "%Y-%m-%d %H:%M:%S") if args.end_date else end_date
+
+ param['exchange_params']['options']['defaultType'] = args.default_type
+
+ param['output_filename'] = param['output_filename'].replace('$SYMBOL$', param['symbol'].replace(":",".").replace("/","."))
+
+ if args.compute_ta:
+ if args.compute_ta=='Y':
+ param['compute_ta'] = True
+ else:
+ param['compute_ta'] = False
+ else:
+ param['compute_ta'] = False
+ param['candle_size'] = args.candle_size
+ param['ma_long_intervals'] = int(args.ma_long_intervals)
+ param['ma_short_intervals'] = int(args.ma_short_intervals)
+ param['boillenger_std_multiples'] = int(args.boillenger_std_multiples)
+
+ if args.pypy_compatible:
+ if args.pypy_compatible=='Y':
+ param['pypy_compatible'] = True
+ else:
+ param['pypy_compatible'] = False
+ else:
+ param['pypy_compatible'] = False
+
+ def instantiate_exchange(
+ exchange_name : str,
+ exchange_params : Dict[str, Any]
+ ) -> CCXTExchange:
+ if exchange_name=='binance':
+ return binance(exchange_params)
+ elif exchange_name=='bybit':
+ return bybit(exchange_params)
+ elif exchange_name=='okx':
+ return okx(exchange_params)
+ elif exchange_name=='deribit':
+ return deribit(exchange_params)
+ else:
+ raise ArgumentError(f"Unsupported exchange {exchange_name}. Please import top of script and add to instantiate_exchange.")
+
+ async def main():
+ parse_args()
+
+ fh = logging.FileHandler(f"ccxt_candles_ta_to_csv.log")
+ fh.setLevel(log_level)
+ fh.setFormatter(formatter)
+ logger.addHandler(fh) # type: ignore
+
+ exchange = instantiate_exchange(param['exchange_name'], param['exchange_params'])
+ markets = exchange.load_markets()
+ if param['symbol'] not in markets:
+ raise ArgumentError(f"{param['symbol']} not support by {param['exchange_name']}")
+
+ pd_candles: Union[pd.DataFrame, None] = fetch_candles(
+ start_ts=int(param['start_date'].timestamp()),
+ end_ts=int(param['end_date'].timestamp()),
+ exchange=exchange,
+ normalized_symbols=[ param['symbol'] ],
+ candle_size=param['candle_size']
+ )[param['symbol']]
+
+ assert pd_candles is not None
+
+ if pd_candles is not None:
+ assert len(pd_candles) > 0, "No candles returned."
+ expected_columns = {'exchange', 'symbol', 'timestamp_ms', 'open', 'high', 'low', 'close', 'volume', 'datetime_utc', 'datetime', 'year', 'month', 'day', 'hour', 'minute', 'week_of_month', 'apac_trading_hr', 'emea_trading_hr', 'amer_trading_hr'}
+ assert set(pd_candles.columns) >= expected_columns, "Missing expected columns."
+ assert pd_candles['timestamp_ms'].notna().all(), "timestamp_ms column contains NaN values."
+ assert pd_candles['timestamp_ms'].is_monotonic_increasing, "Timestamps are not in ascending order."
+
+ if param['compute_ta']:
+ start = time.time()
+ compute_candles_stats(
+ pd_candles=pd_candles,
+ boillenger_std_multiples=param['boillenger_std_multiples'],
+ sliding_window_how_many_candles=param['ma_long_intervals'],
+ slow_fast_interval_ratio=(param['ma_long_intervals']/param['ma_short_intervals']),
+ pypy_compat=param['pypy_compatible']
+ )
+ compute_candles_stats_elapsed_ms = int((time.time() - start) *1000)
+ log(f"TA calculated, took {compute_candles_stats_elapsed_ms} ms")
+
+ log(f"Candles (# rows: {pd_candles.shape[0]}) written to {param['output_filename']}")
+ pd_candles.to_csv(param['output_filename'])
+
+ sys.exit()
+
+ asyncio.run(main())
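Note: the docstring above maps the CLI arguments onto compute_candles_stats: ma_long_intervals becomes the sliding window size and the long/short ratio becomes slow_fast_interval_ratio. A minimal sketch of driving the same computation from Python on a previously exported CSV; it assumes the round-tripped file still satisfies whatever column expectations compute_candles_stats has, and the filename simply follows the output_filename template for symbol BTC/USDT:USDT:

    import pandas as pd
    from siglab_py.util.analytic_util import compute_candles_stats

    pd_candles = pd.read_csv("candles_ta_BTC.USDT.USDT.csv")  # e.g. from an earlier run with --compute_ta N
    ma_long_intervals, ma_short_intervals = 24, 8              # CLI defaults above

    compute_candles_stats(
        pd_candles=pd_candles,                 # the script passes the frame in and reuses it afterwards
        boillenger_std_multiples=2,
        sliding_window_how_many_candles=ma_long_intervals,
        slow_fast_interval_ratio=ma_long_intervals / ma_short_intervals,
        pypy_compat=False                      # True skips the scipy/statsmodels/sklearn based indicators
    )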
siglab_py/market_data_providers/futu_candles_ta_to_csv.py (new file)
@@ -0,0 +1,224 @@
+ import sys
+ import logging
+ import argparse
+ from datetime import datetime, timedelta
+ import time
+ from typing import Dict, Union
+ from enum import Enum
+ import asyncio
+ import pandas as pd
+
+ from futu import *
+
+ from siglab_py.exchanges.futubull import Futubull
+ from siglab_py.util.market_data_util import fetch_candles
+ from siglab_py.util.analytic_util import compute_candles_stats
+
+ '''
+ Usage:
+ set PYTHONPATH=%PYTHONPATH%;D:\dev\siglab\siglab_py
+ python futu_candles_ta_to_csv.py --symbol HK.00700 --end_date "2025-03-11 0:0:0" --start_date "2021-03-11 0:0:0" --candle_size 1h --market HK --trdmarket HK --security_firm FUTUSECURITIES --security_type STOCK --compute_ta Y --pypy_compatible N
+ python futu_candles_ta_to_csv.py --symbol HK.02840 --end_date "2025-07-30 0:0:0" --start_date "2018-01-01 0:0:0" --candle_size 1h --market HK --trdmarket HK --security_firm FUTUSECURITIES --security_type STOCK --compute_ta Y --pypy_compatible N
+ python futu_candles_ta_to_csv.py --symbol AAPL --end_date "2025-03-11 0:0:0" --start_date "2024-03-11 0:0:0" --candle_size 1h --market US --trdmarket US --security_firm FUTUSECURITIES --security_type STOCK --compute_ta Y --pypy_compatible N
+
+ Gold contracts? Note, symbol is case sensitive with Futu. So, "GCmain" is correct, "GCMain" is in-correct.
+ python futu_candles_ta_to_csv.py --symbol US.GCmain --end_date "2025-03-11 0:0:0" --start_date "2021-03-11 0:0:0" --market US --trdmarket FUTURES --security_firm FUTUSECURITIES --security_type FUTURE --compute_ta Y --pypy_compatible N
+
+ (Remember: python -mpip install siglab_py)
+
+ Gold future contract specification: https://www.futunn.com/en/futures/GCMAIN-US/contract-specs
+
+ This script is pypy compatible. Set "pypy_compatible" to True, in which case "compute_candles_stats" will skip calculation for TAs which requires: scipy, statsmodels, scikit-learn, sklearn.preprocessing
+ pypy futu_candles_ta_to_csv.py --symbol HK.00700 --end_date "2025-03-11 0:0:0" --start_date "2024-03-11 0:0:0" --market HK --trdmarket HK --security_firm FUTUSECURITIES --security_type STOCK --compute_ta Y --pypy_compatible Y
+
+ (Remember: pypy -mpip install siglab_py)
+
+ Other arguments:
+ candle_size: default 1h (Hourly candles). You can specify 1d, 1m ...etc
+ ma_long_intervals (default 24), ma_short_intervals (default 8):
+ analytic_util.compute_candles_stats employ sliding windows to calculate things like std (Standard Deviation), EMA/SMAs, and actually most other technical indicators.
+ compute_candles_stats calculate certain things, for example EMA, in two levels: 'long' vs 'short'
+ 'long' refers to 'higher timeframe' - this uses a bigger sliding window specified by 'ma_long_intervals'
+ 'short' refers to 'lower timeframes' - this uses a smaller sliding window specified by 'ma_short_intervals'
+
+ compute_ta: Whether you wish to compute technical indicators? Y or N (Default)
+ pypy_compatible: Some technical indicators requires python libraries that's not pypy compatible, such as statsmodels.api (slopes and divergence calc) and scipy.stats.linregress. Set to Y, then analytic_util.compute_candles_stats will skip calculations which requires these libraries.
+
+ If debugging from VSCode, launch.json:
+
+ {
+ "version": "0.2.0",
+ "configurations": [
+ {
+ "name": "Python Debugger: Current File",
+ "type": "debugpy",
+ "request": "launch",
+ "program": "${file}",
+ "console": "integratedTerminal",
+ "args" : [
+ "--symbol", "HK.00700",
+ "--end_date", "2025-03-11 0:0:0",
+ "--start_date", "2024-03-11 0:0:0",
+ "--market", "HK",
+ "--trdmarket", "HK",
+ "--security_firm", "FUTUSECURITIES",
+ "--security_type", "STOCK",
+ "--compute_ta", "Y",
+ "--pypy_compatible", "N"
+ ],
+ "env": {
+ "PYTHONPATH": "${workspaceFolder}"
+ }
+ }
+ ]
+ }
+ '''
+ end_date : datetime = datetime.today()
+ end_date = datetime(end_date.year, end_date.month, end_date.day)
+ start_date : datetime = end_date - timedelta(days=365)
+
+ param : Dict = {
+ 'symbol' : None,
+ 'start_date' : start_date,
+ 'end_date' : end_date,
+ 'trdmarket' : TrdMarket.HK,
+ 'security_firm' : SecurityFirm.FUTUSECURITIES,
+ 'market' : Market.HK,
+ 'security_type' : SecurityType.STOCK,
+ 'daemon' : {
+ 'host' : '127.0.0.1',
+ 'port' : 11111
+ },
+ 'output_filename' : 'candles_ta_$SYMBOL$.csv'
+ }
+
+ class LogLevel(Enum):
+ CRITICAL = 50
+ ERROR = 40
+ WARNING = 30
+ INFO = 20
+ DEBUG = 10
+ NOTSET = 0
+
+ logging.Formatter.converter = time.gmtime
+ logger = logging.getLogger()
+ log_level = logging.INFO # DEBUG --> INFO --> WARNING --> ERROR
+ logger.setLevel(log_level)
+ format_str = '%(asctime)s %(message)s'
+ formatter = logging.Formatter(format_str)
+ sh = logging.StreamHandler()
+ sh.setLevel(log_level)
+ sh.setFormatter(formatter)
+ logger.addHandler(sh)
+
+ def log(message : str, log_level : LogLevel = LogLevel.INFO):
+ if log_level.value<LogLevel.WARNING.value:
+ logger.info(f"{datetime.now()} {message}")
+
+ elif log_level.value==LogLevel.WARNING.value:
+ logger.warning(f"{datetime.now()} {message}")
+
+ elif log_level.value==LogLevel.ERROR.value:
+ logger.error(f"{datetime.now()} {message}")
+
+ def parse_args():
+ parser = argparse.ArgumentParser() # type: ignore
+ parser.add_argument("--symbol", help="symbol, example HK.00700", default=None)
+ parser.add_argument("--start_date", help="Format: %Y-%m-%d %H:%M:%S", default=None)
+ parser.add_argument("--end_date", help="Format: %Y-%m-%d %H:%M:%S", default=None)
+
+ '''
+ Enums here:
+ https://openapi.futunn.com/futu-api-doc/en/quote/quote.html#66
+ https://openapi.futunn.com/futu-api-doc/en/trade/trade.html#9434
+ '''
+ parser.add_argument("--market", help="market: HK SH SZ US AU CA FX", default=Market.HK)
+ parser.add_argument("--trdmarket", help="trdmarket: HK, HKCC, HKFUND, FUTURES, CN, CA, AU, JP, MY, SG, US, USFUND", default=TrdMarket.HK)
+ parser.add_argument("--security_firm", help="security_firm: FUTUSECURITIES (HK), FUTUINC (US), FUTUSG (SG), FUTUAU (AU)", default=SecurityFirm.FUTUSECURITIES)
+ parser.add_argument("--security_type", help="STOCK, BOND, ETF, FUTURE, WARRANT, IDX ... ", default=SecurityType.STOCK)
+
+ parser.add_argument("--compute_ta", help="Compute technical indicators?. Y or N (default).", default='N')
+ parser.add_argument("--candle_size", help="candle interval: 1m, 1h, 1d... etc", default='1h')
+ parser.add_argument("--ma_long_intervals", help="Sliding Window size in number of intervals for higher timeframe", default=24)
+ parser.add_argument("--ma_short_intervals", help="Sliding Window size in number of intervals for lower timeframe", default=8)
+ parser.add_argument("--boillenger_std_multiples", help="Boillenger bands: # std", default=2)
+
+ parser.add_argument("--pypy_compatible", help="pypy_compatible: If Y, analytic_util will import statsmodels.api (slopes and divergence calc). In any case, partition_sliding_window requires scipy.stats.linregress and cannot be used with pypy. Y or N (default).", default='N')
+
+ args = parser.parse_args()
+ param['symbol'] = args.symbol.strip().upper()
+
+ param['start_date'] = datetime.strptime(args.start_date, "%Y-%m-%d %H:%M:%S") if args.start_date else start_date
+ param['end_date'] = datetime.strptime(args.end_date, "%Y-%m-%d %H:%M:%S") if args.end_date else end_date
+
+ param['market'] = args.market
+ param['trdmarket'] = args.trdmarket
+ param['security_firm'] = args.security_firm
+
+ param['output_filename'] = param['output_filename'].replace('$SYMBOL$', param['symbol'])
+
+ if args.compute_ta:
+ if args.compute_ta=='Y':
+ param['compute_ta'] = True
+ else:
+ param['compute_ta'] = False
+ else:
+ param['compute_ta'] = False
+ param['candle_size'] = args.candle_size
+ param['ma_long_intervals'] = int(args.ma_long_intervals)
+ param['ma_short_intervals'] = int(args.ma_short_intervals)
+ param['boillenger_std_multiples'] = int(args.boillenger_std_multiples)
+
+ if args.pypy_compatible:
+ if args.pypy_compatible=='Y':
+ param['pypy_compatible'] = True
+ else:
+ param['pypy_compatible'] = False
+ else:
+ param['pypy_compatible'] = False
+
+ async def main():
+ parse_args()
+
+ fh = logging.FileHandler(f"futu_candles_ta_to_csv.log")
+ fh.setLevel(log_level)
+ fh.setFormatter(formatter)
+ logger.addHandler(fh) # type: ignore
+
+ exchange = Futubull(param)
+
+ pd_candles: Union[pd.DataFrame, None] = fetch_candles(
+ start_ts=int(param['start_date'].timestamp()),
+ end_ts=int(param['end_date'].timestamp()),
+ exchange=exchange,
+ normalized_symbols=[ param['symbol'] ],
+ candle_size=param['candle_size']
+ )[param['symbol']]
+
+ assert pd_candles is not None
+
+ if pd_candles is not None:
+ assert len(pd_candles) > 0, "No candles returned."
+ expected_columns = {'exchange', 'symbol', 'timestamp_ms', 'open', 'high', 'low', 'close', 'volume', 'datetime_utc', 'datetime', 'year', 'month', 'day', 'hour', 'minute', 'week_of_month', 'apac_trading_hr', 'emea_trading_hr', 'amer_trading_hr'}
+ assert set(pd_candles.columns) >= expected_columns, "Missing expected columns."
+ assert pd_candles['timestamp_ms'].notna().all(), "timestamp_ms column contains NaN values."
+ assert pd_candles['timestamp_ms'].is_monotonic_increasing, "Timestamps are not in ascending order."
+
+ if param['compute_ta']:
+ start = time.time()
+ compute_candles_stats(
+ pd_candles=pd_candles,
+ boillenger_std_multiples=param['boillenger_std_multiples'],
+ sliding_window_how_many_candles=param['ma_long_intervals'],
+ slow_fast_interval_ratio=(param['ma_long_intervals']/param['ma_short_intervals']),
+ pypy_compat=param['pypy_compatible']
+ )
+ compute_candles_stats_elapsed_ms = int((time.time() - start) *1000)
+ log(f"TA calculated, took {compute_candles_stats_elapsed_ms} ms")
+
+ log(f"Candles (# rows: {pd_candles.shape[0]}) written to {param['output_filename']}")
+ pd_candles.to_csv(param['output_filename'])
+
+ sys.exit()
+
+ asyncio.run(main())
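Note: both exporters assert the same baseline column set before writing the CSV. A quick way to re-check an exported file offline, assuming the filename follows the output_filename template above for symbol HK.00700; the column set and assertions are copied from the script itself:

    import pandas as pd

    expected_columns = {
        'exchange', 'symbol', 'timestamp_ms', 'open', 'high', 'low', 'close', 'volume',
        'datetime_utc', 'datetime', 'year', 'month', 'day', 'hour', 'minute',
        'week_of_month', 'apac_trading_hr', 'emea_trading_hr', 'amer_trading_hr'
    }

    pd_candles = pd.read_csv("candles_ta_HK.00700.csv")
    missing = expected_columns - set(pd_candles.columns)
    assert not missing, f"Missing expected columns: {missing}"
    assert pd_candles['timestamp_ms'].notna().all(), "timestamp_ms column contains NaN values."
    assert pd_candles['timestamp_ms'].is_monotonic_increasing, "Timestamps are not in ascending order."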