siglab-py 0.1.29__py3-none-any.whl → 0.6.12__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release. This version of siglab-py might be problematic.
- siglab_py/constants.py +26 -1
- siglab_py/exchanges/binance.py +38 -0
- siglab_py/exchanges/deribit.py +83 -0
- siglab_py/exchanges/futubull.py +12 -2
- siglab_py/market_data_providers/candles_provider.py +2 -2
- siglab_py/market_data_providers/candles_ta_provider.py +3 -3
- siglab_py/market_data_providers/ccxt_candles_ta_to_csv.py +4 -4
- siglab_py/market_data_providers/futu_candles_ta_to_csv.py +7 -2
- siglab_py/market_data_providers/google_monitor.py +320 -0
- siglab_py/market_data_providers/orderbooks_provider.py +15 -12
- siglab_py/market_data_providers/tg_monitor.py +428 -0
- siglab_py/market_data_providers/{test_provider.py → trigger_provider.py} +9 -8
- siglab_py/ordergateway/client.py +172 -41
- siglab_py/ordergateway/encrypt_keys_util.py +1 -1
- siglab_py/ordergateway/gateway.py +456 -347
- siglab_py/ordergateway/test_ordergateway.py +8 -7
- siglab_py/tests/integration/market_data_util_tests.py +35 -1
- siglab_py/tests/unit/analytic_util_tests.py +47 -12
- siglab_py/tests/unit/simple_math_tests.py +235 -0
- siglab_py/tests/unit/trading_util_tests.py +65 -0
- siglab_py/util/analytic_util.py +478 -69
- siglab_py/util/market_data_util.py +487 -100
- siglab_py/util/notification_util.py +78 -0
- siglab_py/util/retry_util.py +11 -3
- siglab_py/util/simple_math.py +240 -0
- siglab_py/util/slack_notification_util.py +59 -0
- siglab_py/util/trading_util.py +118 -0
- {siglab_py-0.1.29.dist-info → siglab_py-0.6.12.dist-info}/METADATA +5 -9
- siglab_py-0.6.12.dist-info/RECORD +44 -0
- {siglab_py-0.1.29.dist-info → siglab_py-0.6.12.dist-info}/WHEEL +1 -1
- siglab_py-0.1.29.dist-info/RECORD +0 -34
- {siglab_py-0.1.29.dist-info → siglab_py-0.6.12.dist-info}/top_level.txt +0 -0
siglab_py/constants.py
CHANGED
@@ -1,3 +1,28 @@
+import enum
 from typing import Union, List, Dict, Any
 
-
+INVALID : int = -1
+
+JSON_SERIALIZABLE_TYPES = Union[str, bool, int, float, None, List[Any], Dict[Any, Any]]
+
+class LogLevel(enum.Enum):
+    CRITICAL = 50
+    ERROR = 40
+    WARNING = 30
+    INFO = 20
+    DEBUG = 10
+    NOTSET = 0
+
+class TrendDirection(enum.Enum):
+    UNDEFINED = 0
+    HIGHER_HIGHS = 1
+    LOWER_HIGHS = 2
+    SIDEWAYS = 3
+    HIGHER_LOWS = 4
+    LOWER_LOWS = 5
+
+    def to_string(self) -> str:
+        return self.name.lower() if self != TrendDirection.UNDEFINED else ''
+
+OrderSide = enum.Enum('OrderSide', 'UNDEFINED BUY SELL')
+PositionStatus = enum.Enum("PositionStatus", 'UNDEFINED OPEN CLOSED SL')
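A quick sketch of how the new enums behave once the package is installed (the printed values are inferred from the definitions above, not from this diff):

    from siglab_py.constants import TrendDirection, OrderSide

    print(TrendDirection.HIGHER_HIGHS.to_string())  # 'higher_highs'
    print(TrendDirection.UNDEFINED.to_string())     # '' (UNDEFINED maps to the empty string)
    print(OrderSide.BUY.name)                       # functional-API enums still expose .name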
siglab_py/exchanges/binance.py
ADDED
@@ -0,0 +1,38 @@
+from typing import Dict, Any
+
+import ccxt
+from ccxt.base.types import Balances
+import ccxt.pro as ccxtpro
+
+'''
+Why override fetch_balance?
+balances['total'] empty
+But you can find that from exchange raw response under balances['info']['balances'] (verbose=True).
+'''
+def _populate_balance_total_if_missing(
+    balances : Dict[str, Any]
+):
+    for ccy_balance in balances['info']['balances']:
+        ccy = ccy_balance['asset']
+        free = float(ccy_balance.get('free', 0))
+        locked = float(ccy_balance.get('locked', 0))
+        total = free + locked
+        if total!=0 and ccy not in balances['total']:
+            balances['total'][ccy] = total
+class Binance(ccxt.binance):
+    def __init__(self, *args: Dict[str, Any]) -> None:
+        super().__init__(*args) # type: ignore
+
+    def fetch_balance(self, params={}) -> Balances: # type: ignore
+        balances = super().fetch_balance(params=params)
+        _populate_balance_total_if_missing(balances)
+        return balances
+
+class BinanceAsync(ccxtpro.binance):
+    def __init__(self, *args: Dict[str, Any]) -> None:
+        super().__init__(*args) # type: ignore
+
+    async def fetch_balance(self, params={}) -> Balances: # type: ignore
+        balances = await super().fetch_balance(params=params)
+        _populate_balance_total_if_missing(balances)
+        return balances
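For context, a minimal sketch of what the fetch_balance override buys you; the credentials and the USDT lookup are placeholders, not from this diff:

    from siglab_py.exchanges.binance import Binance

    exchange = Binance({'apiKey': '...', 'secret': '...'})  # placeholder credentials
    balances = exchange.fetch_balance()
    # Without the override, balances['total'] can come back empty; the wrapper
    # backfills it from balances['info']['balances'] (free + locked, per asset).
    print(balances['total'].get('USDT'))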
siglab_py/exchanges/deribit.py
ADDED
@@ -0,0 +1,83 @@
+from typing import Dict, Any
+
+import ccxt
+import ccxt.pro as ccxtpro
+
+'''
+Why override load_markets?
+Deribit is one of the OG CEX. If you look at 'contractSize' for BTC/USDC:USDC, a linear perp, for example:
+    exchange.markets['BTC/USDC:USDC']['contractSize'] 0.0001
+However, for Deribit, create_order actually expects 'amount' in base ccy, not in "# of contracts" as with most other exchanges supported by CCXT.
+Also note that 'filled' in response from create_order also in base ccy, not in # contracts.
+The general prevailing convention in CCXT is: 'amount' should be quoted in '# contracts'.
+Why CCXT not fix Deribit, so that it follows the prevailing convention? This is because this would be a breaking changes.
+Thus, we override 'contractSize' to 1 for all markets.
+
+Additionally, we need to override 'fetch_position' as it swapped 'notional' with 'contracts'!!! Real ugly. Example below.
+    'id' = None
+    'symbol' = 'BTC/USDC:USDC'
+    'timestamp' = None
+    'datetime' = None
+    'lastUpdateTimestamp' = None
+    'initialMargin' = ???
+    'initialMarginPercentage' = ???
+    'maintenanceMargin' = ???
+    'maintenanceMarginPercentage' = ???
+    'entryPrice' = 85657.0
+    'notional' = 0.0009 <-- This is NOT USD! And this is NOT # Contracts! This is # BTC!
+    'leverage' = 50
+    'unrealizedPnl' = ???
+    'realizedPnl' = ???
+    'contracts' = 77.081445 <-- This is NOT "# contracts"! 0.0009 BTC x markPrice 85646.05
+    'contractSize' = 1.0
+    'marginRatio' = None
+    'liquidationPrice' = None
+    'markPrice' = 85646.05 <-- They use 'markPrice' to calc 'contracts'
+    'lastPrice' = None
+    'collateral' = None
+    'marginMode' = None
+    'side' = 'long'
+    'percentage' = None
+    'hedged' = None
+    'stopLossPrice' = None
+    'takeProfitPrice' = None
+'''
+class Deribit(ccxt.deribit):
+    def __init__(self, *args: Dict[str, Any]) -> None:
+        super().__init__(*args) # type: ignore
+
+    def load_markets(self, reload=False, params={}):
+        self.markets = super().load_markets(reload=reload, params=params)
+
+        for market in self.markets:
+            self.markets[market]['contractSize'] = 1
+
+        return self.markets
+
+    def fetch_position(self, symbol: str, params={}): # type: ignore
+        position = super().fetch_position(symbol=symbol, params=params)
+        pos_usdt = position['contracts']
+        pos_baseccy = position['notional']
+        position['contracts'] = pos_baseccy
+        position['notional'] = pos_usdt
+        return position
+
+class DeribitAsync(ccxtpro.deribit):
+    def __init__(self, *args: Dict[str, Any]) -> None:
+        super().__init__(*args) # type: ignore
+
+    async def load_markets(self, reload=False, params={}):
+        self.markets = await super().load_markets(reload=reload, params=params)
+
+        for market in self.markets:
+            self.markets[market]['contractSize'] = 1
+
+        return self.markets
+
+    async def fetch_position(self, symbol: str, params={}): # type: ignore
+        position = await super().fetch_position(symbol=symbol, params=params)
+        pos_usdt = position['contracts']
+        pos_baseccy = position['notional']
+        position['contracts'] = pos_baseccy
+        position['notional'] = pos_usdt
+        return position
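A minimal sketch of the intended effect, assuming valid credentials; the symbol and sizes mirror the docstring's example, everything else is a placeholder:

    from siglab_py.exchanges.deribit import Deribit

    exchange = Deribit({'apiKey': '...', 'secret': '...'})  # placeholder credentials
    exchange.load_markets()
    assert exchange.markets['BTC/USDC:USDC']['contractSize'] == 1  # overridden from 0.0001
    # With contractSize pinned to 1, 'amount' in base ccy and '# contracts' coincide,
    # matching what Deribit's create_order actually expects.
    position = exchange.fetch_position('BTC/USDC:USDC')
    print(position['contracts'])  # base ccy (e.g. 0.0009 BTC), after the swap
    print(position['notional'])   # what the raw CCXT 'contracts' field used to hold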
siglab_py/exchanges/futubull.py
CHANGED
@@ -4,7 +4,7 @@ https://www.futuhk.com/en/support/categories/909?global_content=%7B%22promote_id
 
 Fees: https://www.futuhk.com/en/commissionnew#crypto
 
-Subscribe L2 data: https://openapi.futunn.com/futu-api-doc/en/intro/authority.html
+Subscribe L2 data: https://openapi.futunn.com/futu-api-doc/en/intro/authority.html
 
 Investor Protection: https://www.futuhk.com/en
 
@@ -15,8 +15,17 @@ Margin Trading:
 Download Futu OpenD
 https://www.futuhk.com/en/support/topic1_464?global_content=%7B%22promote_id%22%3A13765%2C%22sub_promote_id%22%3A10%7D
 
-If you run the installer version "Futu_OpenD-GUI_9.0.5008_Windows.exe", it'd be installed under:
+If you run the installer version "Futu_OpenD-GUI_9.0.5008_Windows.exe", it'd be installed under C-Drive:
 C:\\Users\\xxx\\AppData\\Roaming\\Futu_OpenD\\Futu_OpenD.exe
+Unfortunately, log folder also under C-drive as a result, and they are big.
+
+For command line version: https://openapi.futunn.com/futu-api-doc/opend/opend-cmd.html
+Binary under downloaded package (You can put it under for example D-drive):
+...\Futu_OpenD_9.4.5408_Windows\Futu_OpenD_9.4.5408_Windows
+
+Put a batch file "start_futu_opend.bat", if login_pwd include special characters, enclose pwd with double quotes:
+FutuOpenD -login_account=1234567 -login_pwd="... Your Secret here ..."
+Config file is "FutuOpenD.xml", you can adjust logging verbosity here.
 
 Architecture: https://openapi.futunn.com/futu-api-doc/en/intro/intro.html
 
@@ -30,6 +39,7 @@ API
 stock basic info https://openapi.futunn.com/futu-api-doc/en/quote/get-static-info.html
 historical candles https://openapi.futunn.com/futu-api-doc/en/quote/request-history-kline.html
 realtime candles https://openapi.futunn.com/futu-api-doc/en/quote/get-kl.html
+orderbook https://openapi.futunn.com/futu-api-doc/en/quote/get-order-book.html
 real time quote https://openapi.futunn.com/futu-api-doc/en/quote/get-stock-quote.html
 open orders https://openapi.futunn.com/futu-api-doc/en/trade/get-order-list.html
 historical orders https://openapi.futunn.com/futu-api-doc/en/trade/get-history-order-list.html
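Not part of this diff: a sketch of pulling the historical candles endpoint listed above through the futu-api package, assuming a local OpenD instance on its default port 11111 (symbol and dates are placeholders):

    from futu import OpenQuoteContext, KLType

    quote_ctx = OpenQuoteContext(host='127.0.0.1', port=11111)  # OpenD must be running
    ret, data, page_req_key = quote_ctx.request_history_kline(
        'HK.00700', start='2024-03-11', end='2025-03-11', ktype=KLType.K_60M)
    if ret == 0:
        print(data.head())  # pandas DataFrame of candles
    quote_ctx.close()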
siglab_py/market_data_providers/candles_provider.py
CHANGED
@@ -33,7 +33,7 @@ This script is pypy compatible:
 pypy candles_provider.py --provider_id aaa --candle_size 1h --how_many_candles 2169 --redis_ttl_ms 3600000
 
 Key parameters you may want to modify:
-provider_id: You can trigger this provider instance using
+provider_id: You can trigger this provider instance using trigger_provider.py. Of course, you'd write your own.
 candle_size: 1m, 5m, 15min, 1h, 1d for example.
 how_many_candles: default to 2169 (24 x 90).
 redis_ttl_ms: This is how long orderbook snapshot will last on redis when provider publishes to it.
@@ -318,7 +318,7 @@ async def main():
     self.universe_reload_id = universe_reload_id
     task = None
 
-    log(f"candles_provider {param['provider_id']} started, waiting for trigger. (Can use
+    log(f"candles_provider {param['provider_id']} started, waiting for trigger. (Can use trigger_provider.py to trigger it)")
 
     universe_reload_id = 1
     for message in redis_pubsub.listen():
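Purely illustrative: triggering a provider boils down to a redis publish along the lines of the sketch below; the channel name and payload shape here are hypothetical placeholders, and trigger_provider.py is the reference implementation:

    import json
    from redis import StrictRedis

    redis_client = StrictRedis(host='localhost', port=6379, db=0)
    # 'provider_trigger_aaa' and the payload are hypothetical; check trigger_provider.py for the real ones.
    redis_client.publish('provider_trigger_aaa', json.dumps({'provider_id': 'aaa', 'tickers': ['BTC/USDT:USDT']}))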
siglab_py/market_data_providers/candles_ta_provider.py
CHANGED
@@ -219,11 +219,11 @@ def work(
     https://redis.io/commands/set/
     '''
     expiry_sec : int = 0
-    if candle_size=="m":
+    if candle_size[-1]=="m":
         expiry_sec = 60 + 60*15
-    elif candle_size=="h":
+    elif candle_size[-1]=="h":
         expiry_sec = 60*60 + 60*15
-    elif candle_size=="d":
+    elif candle_size[-1]=="d":
         expiry_sec = 60*60*24
         expiry_sec += 60*15 # additional 15min
 
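The fix matters because candle_size arrives as strings like "1m" or "1h", never a bare unit letter, so the old whole-string comparison always fell through and left expiry_sec at 0. A standalone repro of the new suffix check:

    # Mirrors the patched branch above; values per the diff (the d-branch adds its 15min separately).
    for candle_size in ('1m', '15m', '1h', '1d'):
        expiry_sec = 0
        if candle_size[-1] == 'm':
            expiry_sec = 60 + 60*15
        elif candle_size[-1] == 'h':
            expiry_sec = 60*60 + 60*15
        elif candle_size[-1] == 'd':
            expiry_sec = 60*60*24 + 60*15
        print(candle_size, expiry_sec)  # old code: candle_size=="m" never matched "1m"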
siglab_py/market_data_providers/ccxt_candles_ta_to_csv.py
CHANGED
@@ -26,8 +26,8 @@ from siglab_py.util.analytic_util import compute_candles_stats
 '''
 Usage:
 set PYTHONPATH=%PYTHONPATH%;D:\dev\siglab\siglab_py
-python ccxt_candles_ta_to_csv.py --exchange_name
-
+python ccxt_candles_ta_to_csv.py --exchange_name okx --symbol BTC/USDT:USDT --candle_size 1h --end_date "2025-04-22 0:0:0" --start_date "2024-01-01 0:0:0" --default_type linear --compute_ta Y --pypy_compatible N
+
 (Remember: python -mpip install siglab_py)
 
 This script is pypy compatible. Set "pypy_compatible" to True, in which case "compute_candles_stats" will skip calculation for TAs which requires: scipy, statsmodels, scikit-learn, sklearn.preprocessing
@@ -60,8 +60,8 @@ If debugging from VSCode, launch.json:
 "args" : [
     "--exchange_name", "bybit",
     "--symbol", "BTC/USDT:USDT",
-    "--end_date", "2025-
-    "--start_date", "2024-
+    "--end_date", "2025-04-22 0:0:0",
+    "--start_date", "2024-01-01 0:0:0",
     "--default_type", "linear",
     "--compute_ta", "Y",
     "--pypy_compatible", "N"
siglab_py/market_data_providers/futu_candles_ta_to_csv.py
CHANGED
@@ -17,12 +17,17 @@ from siglab_py.util.analytic_util import compute_candles_stats
 '''
 Usage:
 set PYTHONPATH=%PYTHONPATH%;D:\dev\siglab\siglab_py
-python futu_candles_ta_to_csv.py --symbol HK.00700 --end_date "2025-03-11 0:0:0" --start_date "2021-03-11 0:0:0" --market HK --trdmarket HK --security_firm FUTUSECURITIES --security_type STOCK --compute_ta Y --pypy_compatible N
+python futu_candles_ta_to_csv.py --symbol HK.00700 --end_date "2025-03-11 0:0:0" --start_date "2021-03-11 0:0:0" --candle_size 1h --market HK --trdmarket HK --security_firm FUTUSECURITIES --security_type STOCK --compute_ta Y --pypy_compatible N
+python futu_candles_ta_to_csv.py --symbol HK.02840 --end_date "2025-07-30 0:0:0" --start_date "2018-01-01 0:0:0" --candle_size 1h --market HK --trdmarket HK --security_firm FUTUSECURITIES --security_type STOCK --compute_ta Y --pypy_compatible N
+python futu_candles_ta_to_csv.py --symbol AAPL --end_date "2025-03-11 0:0:0" --start_date "2024-03-11 0:0:0" --candle_size 1h --market US --trdmarket US --security_firm FUTUSECURITIES --security_type STOCK --compute_ta Y --pypy_compatible N
 
-
+Gold contracts? Note, symbol is case sensitive with Futu. So, "GCmain" is correct, "GCMain" is in-correct.
+python futu_candles_ta_to_csv.py --symbol US.GCmain --end_date "2025-03-11 0:0:0" --start_date "2021-03-11 0:0:0" --market US --trdmarket FUTURES --security_firm FUTUSECURITIES --security_type FUTURE --compute_ta Y --pypy_compatible N
 
 (Remember: python -mpip install siglab_py)
 
+Gold future contract specification: https://www.futunn.com/en/futures/GCMAIN-US/contract-specs
+
 This script is pypy compatible. Set "pypy_compatible" to True, in which case "compute_candles_stats" will skip calculation for TAs which requires: scipy, statsmodels, scikit-learn, sklearn.preprocessing
 pypy futu_candles_ta_to_csv.py --symbol HK.00700 --end_date "2025-03-11 0:0:0" --start_date "2024-03-11 0:0:0" --market HK --trdmarket HK --security_firm FUTUSECURITIES --security_type STOCK --compute_ta Y --pypy_compatible Y
 
siglab_py/market_data_providers/google_monitor.py
ADDED
@@ -0,0 +1,320 @@
+import asyncio
+import sys
+import traceback
+import os
+import argparse
+import json
+import hashlib
+import re
+from datetime import datetime, timedelta, timezone
+import time
+import pytz
+import arrow
+from enum import Enum
+import logging
+import requests
+from typing import Dict, Optional, Set, Any, Union, List
+from redis import StrictRedis
+
+from siglab_py.util.notification_util import dispatch_notification
+
+current_filename = os.path.basename(__file__)
+
+'''
+google_monitor fetches messages from particular query. Then:
+a. Save (and accumulate) messages to message cache file (No duplicates) for further analysis.
+    message_cache_file: str = f"google_search_messages.json"
+
+b. If any of keywords in message_keywords_filter matches words in message (--message_keywords_filter):
+    - Publish to redis for strategy consumption, topic: param['mds']['topics']['google_alert']
+    - Dispatch slack alert
+    - If scripts runs on Windows, play a wav file (Feels free make modification play sounds on Ubuntu for example)
+
+Usage:
+set PYTHONPATH=%PYTHONPATH%;D:\dev\siglab\siglab_py
+python google_monitor.py --apikey xxx --search_engine_id yyy --query "site:twitter.com @user_id1 @user_id2 some topic" --slack_info_url=https://hooks.slack.com/services/xxx --slack_critial_url=https://hooks.slack.com/services/xxx --slack_alert_url=https://hooks.slack.com/services/xxx
+
+alert_wav_path
+    Point it to wav file for alert notification. It's using 'winsound', i.e. Windows only.
+    Set to None otherwise.
+
+Google API: https://console.cloud.google.com/apis/credentials?project=YOUR_PROJECT
+    name: YOUR_API_KEY_NAME
+    apikey: ?????
+
+Google Search Engine
+To create
+    name: siglab_py_search: https://programmablesearchengine.google.com/controlpanel/create
+    <script async src="https://cse.google.com/cse.js?cx=YOUR_SEARCH_ENGINE_ID">
+    </script>
+    <div class="gcse-search"></div>
+Then enable it: https://console.developers.google.com/apis/api/customsearch.googleapis.com/overview?project=?????
+
+launch.json for Debugging from VSCode:
+{
+    "version": "0.2.0",
+    "configurations": [
+        {
+            "name": "Python: Current File",
+            "type": "python",
+            "request": "launch",
+            "program": "${file}",
+            "console": "integratedTerminal",
+            "justMyCode": false,
+            "args" : [
+                "--apikey", "xxx",
+                "--search_engine_id", "yyy",
+                "--query", "site:twitter.com @user_id1 @user_id2 some topic",
+                "--slack_info_url", "https://hooks.slack.com/services/xxx",
+                "--slack_critial_url", "https://hooks.slack.com/services/xxx",
+                "--slack_alert_url", "https://hooks.slack.com/services/xxx",
+            ],
+        }
+    ]
+}
+'''
+
+param: Dict[str, Any] = {
+    'apikey': os.getenv('GOOGLE_APIKEY', 'xxx'),
+    'search_engine_id': os.getenv('GOOGLE_SEARCH_ENGINE_ID', 'xxx'),
+    'num_results' : 10,
+    'query' : '',
+    'alert_wav_path' : r"d:\sounds\terrible.wav",
+    "num_shouts" : 5, # How many times 'alert_wav_path' is played
+    "loop_freq_ms" : 1000*60*15, # Google allow max 100 calls per day free.
+    'current_filename' : current_filename,
+
+    'notification' : {
+        'footer' : None,
+
+        # slack webhook url's for notifications
+        'slack' : {
+            'info' : { 'webhook_url' : None },
+            'critical' : { 'webhook_url' : None },
+            'alert' : { 'webhook_url' : None },
+        }
+    },
+
+    'mds': {
+        'topics': {
+            'tg_alert': 'tg_alert'
+        },
+        'redis': {
+            'host': 'localhost',
+            'port': 6379,
+            'db': 0,
+            'ttl_ms': 1000 * 60 * 15
+        }
+    }
+}
+
+class LogLevel(Enum):
+    CRITICAL = 50
+    ERROR = 40
+    WARNING = 30
+    INFO = 20
+    DEBUG = 10
+    NOTSET = 0
+
+logging.Formatter.converter = time.gmtime
+logger: logging.Logger = logging.getLogger()
+log_level: int = logging.INFO
+logger.setLevel(log_level)
+format_str: str = '%(asctime)s %(message)s'
+formatter: logging.Formatter = logging.Formatter(format_str)
+sh: logging.StreamHandler = logging.StreamHandler()
+sh.setLevel(log_level)
+sh.setFormatter(formatter)
+logger.addHandler(sh)
+
+def log(message: str, log_level: LogLevel = LogLevel.INFO) -> None:
+    if log_level.value < LogLevel.WARNING.value:
+        logger.info(f"{datetime.now()} {message}")
+    elif log_level.value == LogLevel.WARNING.value:
+        logger.warning(f"{datetime.now()} {message}")
+    elif log_level.value == LogLevel.ERROR.value:
+        logger.error(f"{datetime.now()} {message}")
+
+def parse_args():
+    parser = argparse.ArgumentParser() # type: ignore
+    parser.add_argument("--apikey", help="API key", default=None)
+    parser.add_argument("--search_engine_id", help="Google search engine ID", default=None)
+    parser.add_argument("--num_results", help="Max number items to fetch", default=10)
+    parser.add_argument("--query", help="Query - what are you looking for?", default=None)
+    parser.add_argument("--slack_info_url", help="Slack webhook url for INFO", default=None)
+    parser.add_argument("--slack_critial_url", help="Slack webhook url for CRITICAL", default=None)
+    parser.add_argument("--slack_alert_url", help="Slack webhook url for ALERT", default=None)
+
+    args = parser.parse_args()
+
+    param['apikey'] = args.apikey
+    param['search_engine_id'] = args.search_engine_id
+    param['num_results'] = args.num_results
+    param['query'] = args.query
+
+    param['notification']['slack']['info']['webhook_url'] = args.slack_info_url
+    param['notification']['slack']['critical']['webhook_url'] = args.slack_critial_url
+    param['notification']['slack']['alert']['webhook_url'] = args.slack_alert_url
+
+    param['notification']['footer'] = f"From {param['current_filename']}"
+
+    print(f"Startup args: {args}") # Dont use logger, not yet setup yet.
+    print(f"param: {print(json.dumps(param, indent=2))}")
+
+def init_redis_client() -> StrictRedis:
+    redis_client : StrictRedis = StrictRedis(
+        host = param['mds']['redis']['host'],
+        port = param['mds']['redis']['port'],
+        db = 0,
+        ssl = False
+    )
+    try:
+        redis_client.keys()
+    except ConnectionError as redis_conn_error:
+        err_msg = f"Failed to connect to redis: {param['mds']['redis']['host']}, port: {param['mds']['redis']['port']}"
+        log(f"Failed to init redis connection. Will skip publishes to redis. {err_msg}")
+        redis_client = None # type: ignore
+
+    return redis_client
+
+def search_google_custom(query, api_key, search_engine_id, num_results=10):
+    url = 'https://www.googleapis.com/customsearch/v1'
+    params = {
+        'key': api_key,
+        'cx': search_engine_id,
+        'q': query,
+        'num': num_results,
+        'sort': 'date',
+        'dateRestrict': 'd1' # Restrict to most recent (adjust as needed: d1=day, m1=month, etc.)
+    }
+
+    response = requests.get(url, params=params)
+
+    if response.status_code == 200:
+        return response.json()
+    else:
+        log(f"Query error: {response.status_code} - {response.text}")
+        return None
+
+async def main() -> None:
+    parse_args()
+
+    message_cache_file: str = f"google_search_messages.json"
+    log(f"message_cache_file: {message_cache_file}")
+
+    notification_params : Dict[str, Any] = param['notification']
+
+    processed_messages : List[Dict[str, Any]] = []
+    seen_hashes : Set[str] = set()
+    if os.path.exists(message_cache_file):
+        with open(message_cache_file, 'r', encoding='utf-8') as f:
+            lines = f.readlines()
+            for line in lines:
+                message_data = json.loads(line)
+                message_hash: str = hashlib.sha256(message_data['message'].encode('utf-8')).hexdigest()
+
+                message_data['datetime'] = pytz.UTC.localize(arrow.get(message_data['datetime']).datetime.replace(tzinfo=None))
+
+                if message_hash not in seen_hashes:
+                    seen_hashes.add(message_hash)
+                    processed_messages.append(message_data)
+
+    processed_messages = sorted(processed_messages, key=lambda m: m['datetime'])
+
+    try:
+        redis_client: Optional[StrictRedis] = init_redis_client()
+    except Exception as redis_err:
+        redis_client = None
+        log(f"Failed to connect to redis. Still run but not publishing to it. {redis_err}")
+
+    while True:
+        try:
+            results = search_google_custom(param['query'], param['apikey'], param['search_engine_id'], param['num_results'])
+
+            if results:
+                if 'items' in results:
+                    for item in results['items']:
+                        title = item.get('title', 'No title')
+                        snippet = item.get('snippet', 'No snippet')
+                        link = item.get('link', 'No link')
+                        published_date = item.get('pagemap', {}).get('metatags', [{}])[0].get('article:published_time', 'No date')
+
+                        dt_message = datetime.now()
+                        pattern = r'^\d+\s*(?:days?|day?|hours?|hour?|minutes?|minute?|seconds?|second?|h|m|s)\s*(?:ago)?.*?([A-Za-z]+\s+\d+,\s+\d{4},\s+\d+:\d+\s+[AP]M\s+ET)'
+                        match = re.match(pattern, snippet)
+                        if published_date == 'No date' and match:
+                            published_date = match.group(1)
+                            dt_message = datetime.strptime(published_date, '%b %d, %Y, %I:%M %p ET')
+
+                        snippet = re.sub(pattern, '', snippet).strip()
+
+                        timestamp_ms = int(dt_message.timestamp() * 1000)
+                        message_data: Dict[str, Any] = {
+                            "timestamp_ms": timestamp_ms,
+                            "datetime": dt_message.isoformat(), # Always in UTC
+                            "title" : title,
+                            "message": snippet,
+                            "url" : link
+                        }
+                        json_str: str = json.dumps(message_data, ensure_ascii=False, sort_keys=True)
+                        message_hash: str = hashlib.sha256(snippet.encode('utf-8')).hexdigest()
+                        if (message_hash not in seen_hashes):
+                            seen_hashes.add(message_hash)
+                            processed_messages.append(message_data)
+
+                            log(f"{message_data}")
+
+                            dispatch_notification(title=f"{param['current_filename']} Incoming! {title}", message=message_data, footer=param['notification']['footer'], params=notification_params, log_level=LogLevel.CRITICAL, logger=logger) # type: ignore
+
+                            with open(message_cache_file, 'a', encoding='utf-8') as f:
+                                json.dump(message_data, f, ensure_ascii=False)
+                                f.write('\n')
+
+                            if param['alert_wav_path'] and sys.platform == 'win32':
+                                import winsound
+                                for _ in range(param['num_shouts']):
+                                    winsound.PlaySound(param['alert_wav_path'], winsound.SND_FILENAME)
+
+                            if redis_client:
+                                try:
+                                    publish_topic = f"google_search"
+                                    redis_client.publish(publish_topic, json_str)
+                                    redis_client.setex(message_hash, param['mds']['redis']['ttl_ms'] // 1000, json_str)
+                                    log(f"Published message {json_str} to Redis topic {publish_topic}", LogLevel.INFO)
+                                except Exception as e:
+                                    log(f"Failed to publish to Redis: {str(e)}", LogLevel.ERROR)
+
+                            await asyncio.sleep(int(param['loop_freq_ms'] / 1000))
+
+            if processed_messages:
+                oldest_message: Dict[str, Any] = min(processed_messages, key=lambda x: x['timestamp_ms'])
+                newest_message: Dict[str, Any] = max(processed_messages, key=lambda x: x['timestamp_ms'])
+                log(
+                    json.dumps(
+                        {
+                            'num_messages': len(processed_messages),
+                            'oldest': {
+                                'timestamp_ms': oldest_message['timestamp_ms'],
+                                'datetime': datetime.fromtimestamp(int(oldest_message['timestamp_ms']/1000),tz=timezone.utc).isoformat()
+                            },
+                            'latest': {
+                                'timestamp_ms': newest_message['timestamp_ms'],
+                                'datetime': datetime.fromtimestamp(int(newest_message['timestamp_ms']/1000),tz=timezone.utc).isoformat()
+                            }
+                        }, indent=2
+                    ),
+                    LogLevel.INFO
+                )
+
+        except Exception as e:
+            log(f"Oops {str(e)} {str(sys.exc_info()[0])} {str(sys.exc_info()[1])} {traceback.format_exc()}", LogLevel.ERROR)
+        finally:
+            await asyncio.sleep(int(param['loop_freq_ms'] / 1000))
+
+if __name__ == '__main__':
+    try:
+        asyncio.run(main())
+    except KeyboardInterrupt:
+        log("Stopped by user", LogLevel.INFO)
+    except Exception as e:
+        log(f"Unexpected error: {str(e)}", LogLevel.ERROR)
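On the consuming side, a minimal subscriber for the 'google_search' topic published above (host/port mirror the script's own defaults):

    import json
    from redis import StrictRedis

    redis_client = StrictRedis(host='localhost', port=6379, db=0)
    pubsub = redis_client.pubsub()
    pubsub.subscribe('google_search')  # topic name as hardcoded in google_monitor.py
    for message in pubsub.listen():
        if message['type'] == 'message':
            alert = json.loads(message['data'])
            print(alert['datetime'], alert['title'], alert['url'])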
siglab_py/market_data_providers/orderbooks_provider.py
CHANGED
@@ -20,6 +20,8 @@ from redis.client import PubSub
 from ccxt.base.exchange import Exchange
 import ccxt.pro as ccxtpro
 
+from siglab_py.util.market_data_util import async_instantiate_exchange
+
 '''
 To start from command prompt:
 set PYTHONPATH=%PYTHONPATH%;D:\dev\siglab
@@ -28,7 +30,7 @@ To start from command prompt:
 This script is pypy compatible.
 
 Key parameters you may want to modify:
-provider_id: You can trigger this provider instance using
+provider_id: You can trigger this provider instance using trigger_provider.py. Of course, you'd write your own.
 instance_capacity: max # tickers this provider instance will handle.
 ts_delta_observation_ms_threshold: default to 150ms. "Observation Delta" is clock diff between orderbook timestamp, and your local server clock.
 ts_delta_consecutive_ms_threshold: default to 150ms. "Consecutive Delta" is time elapsed between consecutive orderbook updates.
@@ -126,16 +128,17 @@ async def instantiate_exhange(
 ) -> Exchange:
     if old_exchange:
         await old_exchange.close() # type: ignore Otherwise, Error: Cannot access attribute "close" for class "Exchange Attribute "close" is unknown
-
-
-
-
-
-
-
-
+    _exchange_name = exchange_name.split('_')[0]
+
+    exchange = await async_instantiate_exchange(
+        gateway_id = _exchange_name,
+        default_type = market_type,
+        api_key=None, # type: ignore
+        secret=None, # type: ignore
+        passphrase=None # type: ignore
+    )
     exchange.name = exchange_name # type: ignore Otherwise, Error: Cannot assign to attribute "name" for class "binance" "str" is not assignable to "None"
-    return exchange
+    return exchange # type: ignore
 
 def log(message : str, log_level : LogLevel = LogLevel.INFO):
     if log_level.value<LogLevel.WARNING.value:
@@ -259,7 +262,7 @@ class OrderBook:
         }
 
         data['best_ask'] = min(data['asks'])
-        data['best_bid'] =
+        data['best_bid'] = max(data['bids'])
         return data
 
 class ThreadTask:
@@ -348,7 +351,7 @@ async def main():
     candles_publish_topic : str = param['mds']['topics']['candles_publish_topic']
     redis_pubsub : PubSub = init_redis_channel_subscription(redis_client, partition_assign_topic)
 
-    log(f"orderbooks_provider {param['provider_id']} started, waiting for trigger. (Can use
+    log(f"orderbooks_provider {param['provider_id']} started, waiting for trigger. (Can use trigger_provider.py to trigger it)")
 
     tasks = []
     for message in redis_pubsub.listen():