siglab-py 0.1.30__py3-none-any.whl → 0.6.33__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package as they appear in the public registry. It is provided for informational purposes only.
- siglab_py/algo/__init__.py +0 -0
- siglab_py/algo/macdrsi_crosses_15m_tc_strategy.py +107 -0
- siglab_py/algo/strategy_base.py +122 -0
- siglab_py/algo/strategy_executor.py +1308 -0
- siglab_py/algo/tp_algo.py +529 -0
- siglab_py/backtests/__init__.py +0 -0
- siglab_py/backtests/backtest_core.py +2405 -0
- siglab_py/backtests/coinflip_15m_crypto.py +432 -0
- siglab_py/backtests/fibonacci_d_mv_crypto.py +541 -0
- siglab_py/backtests/macdrsi_crosses_15m_tc_crypto.py +473 -0
- siglab_py/constants.py +26 -1
- siglab_py/exchanges/binance.py +38 -0
- siglab_py/exchanges/deribit.py +83 -0
- siglab_py/exchanges/futubull.py +12 -2
- siglab_py/market_data_providers/candles_provider.py +11 -10
- siglab_py/market_data_providers/candles_ta_provider.py +5 -5
- siglab_py/market_data_providers/ccxt_candles_ta_to_csv.py +4 -4
- siglab_py/market_data_providers/futu_candles_ta_to_csv.py +7 -2
- siglab_py/market_data_providers/google_monitor.py +320 -0
- siglab_py/market_data_providers/orderbooks_provider.py +15 -12
- siglab_py/market_data_providers/tg_monitor.py +428 -0
- siglab_py/market_data_providers/{test_provider.py → trigger_provider.py} +9 -8
- siglab_py/ordergateway/client.py +172 -41
- siglab_py/ordergateway/encrypt_keys_util.py +1 -1
- siglab_py/ordergateway/gateway.py +456 -347
- siglab_py/ordergateway/test_ordergateway.py +8 -7
- siglab_py/tests/integration/market_data_util_tests.py +75 -2
- siglab_py/tests/unit/analytic_util_tests.py +47 -12
- siglab_py/tests/unit/market_data_util_tests.py +45 -1
- siglab_py/tests/unit/simple_math_tests.py +252 -0
- siglab_py/tests/unit/trading_util_tests.py +65 -0
- siglab_py/util/analytic_util.py +476 -67
- siglab_py/util/datetime_util.py +39 -0
- siglab_py/util/market_data_util.py +528 -98
- siglab_py/util/module_util.py +40 -0
- siglab_py/util/notification_util.py +78 -0
- siglab_py/util/retry_util.py +16 -3
- siglab_py/util/simple_math.py +262 -0
- siglab_py/util/slack_notification_util.py +59 -0
- siglab_py/util/trading_util.py +118 -0
- {siglab_py-0.1.30.dist-info → siglab_py-0.6.33.dist-info}/METADATA +5 -9
- siglab_py-0.6.33.dist-info/RECORD +56 -0
- {siglab_py-0.1.30.dist-info → siglab_py-0.6.33.dist-info}/WHEEL +1 -1
- siglab_py-0.1.30.dist-info/RECORD +0 -34
- {siglab_py-0.1.30.dist-info → siglab_py-0.6.33.dist-info}/top_level.txt +0 -0
siglab_py/market_data_providers/candles_provider.py:
@@ -21,7 +21,7 @@ from ccxt.okx import okx
 from ccxt.bybit import bybit
 from ccxt.base.exchange import Exchange
 
-from util.market_data_util import fetch_candles
+from siglab_py.util.market_data_util import fetch_candles
 
 
 '''
@@ -33,7 +33,7 @@ This script is pypy compatible:
     pypy candles_provider.py --provider_id aaa --candle_size 1h --how_many_candles 2169 --redis_ttl_ms 3600000
 
 Key parameters you may want to modify:
-    provider_id: You can trigger this provider instance using
+    provider_id: You can trigger this provider instance using trigger_provider.py. Of course, you'd write your own.
     candle_size: 1m, 5m, 15min, 1h, 1d for example.
     how_many_candles: default to 2169 (24 x 90).
     redis_ttl_ms: This is how long orderbook snapshot will last on redis when provider publishes to it.
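
The provider sits idle until it receives a trigger over Redis pub/sub addressed to its provider_id. A rough sketch of such a trigger follows; the channel name "partition_assign" and the payload fields are assumptions for illustration only, not the actual schema implemented by trigger_provider.py.

    # Hypothetical trigger sketch: the channel name and payload fields below are
    # guesses for illustration, not the schema used by trigger_provider.py.
    import json
    from redis import StrictRedis

    redis_client = StrictRedis(host="localhost", port=6379, db=0)
    trigger = {
        "provider_id": "aaa",             # must match --provider_id of the running provider
        "tickers": ["BTC/USDT:USDT"],     # universe this instance should start fetching
    }
    redis_client.publish("partition_assign", json.dumps(trigger))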
@@ -124,9 +124,9 @@ bybit_exchange = bybit({
 })
 
 exchanges = {
-    f"
-    f"
-    f"
+    f"binance" : binance_exchange,
+    f"okx" : okx_exchange,
+    f"bybit" : bybit_exchange
 }
 
 def log(message : str, log_level : LogLevel = LogLevel.INFO):
@@ -209,6 +209,7 @@ def process_universe(
         last_fetch_ts = last_fetch.iloc[-1]['timestamp_ms']/1000 # type: ignore Otherwise, Error: Cannot access attribute "iloc" for class "None"
         candle_size = param['candle_size']
         interval = candle_size[-1]
+        num_intervals_per_candle = int(candle_size.replace(interval,""))
         number_intervals = param['how_many_candles']
 
         start_date : datetime = datetime.now()
@@ -216,7 +217,7 @@ def process_universe(
         if interval=="m":
             end_date = datetime.now()
             end_date = datetime(end_date.year, end_date.month, end_date.day, end_date.hour, end_date.minute, 0)
-            start_date = end_date + timedelta(minutes=-number_intervals)
+            start_date = end_date + timedelta(minutes=-num_intervals_per_candle*number_intervals)
 
             num_sec_since_last_fetch = (end_date.timestamp() - last_fetch_ts) if last_fetch_ts else sys.maxsize
             fetch_again = True if num_sec_since_last_fetch >= 60 / 10 else False
@@ -224,7 +225,7 @@ def process_universe(
         elif interval=="h":
             end_date = datetime.now()
             end_date = datetime(end_date.year, end_date.month, end_date.day, end_date.hour, 0, 0)
-            start_date = end_date + timedelta(hours=-number_intervals)
+            start_date = end_date + timedelta(hours=-num_intervals_per_candle*number_intervals)
 
             num_sec_since_last_fetch = (end_date.timestamp() - last_fetch_ts) if last_fetch_ts else sys.maxsize
             fetch_again = True if num_sec_since_last_fetch >= 60*60 / 10 else False
@@ -232,7 +233,7 @@ def process_universe(
         elif interval=="d":
             end_date = datetime.now()
             end_date = datetime(end_date.year, end_date.month, end_date.day, 0, 0, 0)
-            start_date = end_date + timedelta(days=-number_intervals)
+            start_date = end_date + timedelta(days=-num_intervals_per_candle*number_intervals)
 
             num_sec_since_last_fetch = (end_date.timestamp() - last_fetch_ts) if last_fetch_ts else sys.maxsize
             fetch_again = True if num_sec_since_last_fetch >= 24*60*60 / 10 else False
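
The three timedelta hunks above apply the same correction: the lookback window is now num_intervals_per_candle * number_intervals base units rather than number_intervals alone. A small worked sketch of the arithmetic, assuming candle_size follows the digits-plus-unit convention shown in the docstring:

    from datetime import datetime, timedelta

    candle_size = "15m"        # e.g. --candle_size
    how_many_candles = 96      # e.g. --how_many_candles

    interval = candle_size[-1]                                          # "m"
    num_intervals_per_candle = int(candle_size.replace(interval, ""))   # 15

    end_date = datetime.now().replace(second=0, microsecond=0)
    # Old behaviour: timedelta(minutes=-how_many_candles) -> only 96 minutes of history.
    # New behaviour: 15 * 96 minutes, enough history for 96 fifteen-minute candles.
    start_date = end_date + timedelta(minutes=-num_intervals_per_candle * how_many_candles)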
@@ -284,7 +285,7 @@ def process_universe(
 
             redis_set_elapsed_ms = int((time.time() - start) *1000)
 
-            log(f"published candles {this_row_header} {publish_key} {sys.getsizeof(data, -1)} bytes to mds elapsed {redis_set_elapsed_ms} ms")
+            log(f"published candles {candles[ticker].shape[0]} rows. {this_row_header} {publish_key} {sys.getsizeof(data, -1)} bytes to mds elapsed {redis_set_elapsed_ms} ms")
 
         except Exception as loop_error:
             log(f"Failed to process {this_row_header}. Error: {loop_error} {str(sys.exc_info()[0])} {str(sys.exc_info()[1])} {traceback.format_exc()}")
@@ -318,7 +319,7 @@ async def main():
             self.universe_reload_id = universe_reload_id
             task = None
 
-    log(f"candles_provider {param['provider_id']} started, waiting for trigger. (Can use
+    log(f"candles_provider {param['provider_id']} started, waiting for trigger. (Can use trigger_provider.py to trigger it)")
 
     universe_reload_id = 1
     for message in redis_pubsub.listen():

siglab_py/market_data_providers/candles_ta_provider.py:
@@ -17,7 +17,7 @@ import pandas as pd
 import numpy as np
 from redis import StrictRedis
 
-from util.analytic_util import compute_candles_stats
+from siglab_py.util.analytic_util import compute_candles_stats
 
 '''
 candles_provider.py will feed candles to redis.
@@ -219,18 +219,18 @@ def work(
         https://redis.io/commands/set/
         '''
         expiry_sec : int = 0
-        if candle_size=="m":
+        if candle_size[-1]=="m":
             expiry_sec = 60 + 60*15
-        elif candle_size=="h":
+        elif candle_size[-1]=="h":
             expiry_sec = 60*60 + 60*15
-        elif candle_size=="d":
+        elif candle_size[-1]=="d":
             expiry_sec = 60*60*24
             expiry_sec += 60*15 # additional 15min
 
         redis_client.set(name=publish_key, value=json.dumps(data).encode('utf-8'), ex=expiry_sec)
         redis_set_elapsed_ms = int((time.time() - start) *1000)
 
-        log(f"published candles {publish_key} {sys.getsizeof(data, -1)} bytes to mds elapsed {redis_set_elapsed_ms} ms, compute_candles_stats_elapsed_ms: {compute_candles_stats_elapsed_ms}")
+        log(f"published candles {pd_candles.shape[0]} rows. {publish_key} {sys.getsizeof(data, -1)} bytes to mds elapsed {redis_set_elapsed_ms} ms, compute_candles_stats_elapsed_ms: {compute_candles_stats_elapsed_ms}")
     else:
         log(f"{s_key} message with hash {message_hash} been processed previously.")
 
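
The point of switching to candle_size[-1] above is that candle_size is the full size string (for example "1h" or "15m"), so the old equality checks could never match and expiry_sec was left at 0. A minimal before/after check, assuming an hourly feed:

    candle_size = "1h"
    assert candle_size != "h"         # old comparison: never true, expiry_sec stayed 0
    assert candle_size[-1] == "h"     # new comparison: keys off the unit suffix
    expiry_sec = 60*60 + 60*15        # hourly candles now get a 75-minute TTL on redis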

siglab_py/market_data_providers/ccxt_candles_ta_to_csv.py:
@@ -26,8 +26,8 @@ from siglab_py.util.analytic_util import compute_candles_stats
 '''
 Usage:
     set PYTHONPATH=%PYTHONPATH%;D:\dev\siglab\siglab_py
-    python ccxt_candles_ta_to_csv.py --exchange_name
-
+    python ccxt_candles_ta_to_csv.py --exchange_name okx --symbol BTC/USDT:USDT --candle_size 1h --end_date "2025-04-22 0:0:0" --start_date "2024-01-01 0:0:0" --default_type linear --compute_ta Y --pypy_compatible N
+
 (Remember: python -mpip install siglab_py)
 
 This script is pypy compatible. Set "pypy_compatible" to True, in which case "compute_candles_stats" will skip calculation for TAs which requires: scipy, statsmodels, scikit-learn, sklearn.preprocessing
@@ -60,8 +60,8 @@ If debugging from VSCode, launch.json:
             "args" : [
                 "--exchange_name", "bybit",
                 "--symbol", "BTC/USDT:USDT",
-                "--end_date", "2025-
-                "--start_date", "2024-
+                "--end_date", "2025-04-22 0:0:0",
+                "--start_date", "2024-01-01 0:0:0",
                 "--default_type", "linear",
                 "--compute_ta", "Y",
                 "--pypy_compatible", "N"

siglab_py/market_data_providers/futu_candles_ta_to_csv.py:
@@ -17,12 +17,17 @@ from siglab_py.util.analytic_util import compute_candles_stats
 '''
 Usage:
     set PYTHONPATH=%PYTHONPATH%;D:\dev\siglab\siglab_py
-    python futu_candles_ta_to_csv.py --symbol HK.00700 --end_date "2025-03-11 0:0:0" --start_date "2021-03-11 0:0:0" --market HK --trdmarket HK --security_firm FUTUSECURITIES --security_type STOCK --compute_ta Y --pypy_compatible N
+    python futu_candles_ta_to_csv.py --symbol HK.00700 --end_date "2025-03-11 0:0:0" --start_date "2021-03-11 0:0:0" --candle_size 1h --market HK --trdmarket HK --security_firm FUTUSECURITIES --security_type STOCK --compute_ta Y --pypy_compatible N
+    python futu_candles_ta_to_csv.py --symbol HK.02840 --end_date "2025-07-30 0:0:0" --start_date "2018-01-01 0:0:0" --candle_size 1h --market HK --trdmarket HK --security_firm FUTUSECURITIES --security_type STOCK --compute_ta Y --pypy_compatible N
+    python futu_candles_ta_to_csv.py --symbol AAPL --end_date "2025-03-11 0:0:0" --start_date "2024-03-11 0:0:0" --candle_size 1h --market US --trdmarket US --security_firm FUTUSECURITIES --security_type STOCK --compute_ta Y --pypy_compatible N
 
-
+    Gold contracts? Note, symbol is case sensitive with Futu. So, "GCmain" is correct, "GCMain" is in-correct.
+    python futu_candles_ta_to_csv.py --symbol US.GCmain --end_date "2025-03-11 0:0:0" --start_date "2021-03-11 0:0:0" --market US --trdmarket FUTURES --security_firm FUTUSECURITIES --security_type FUTURE --compute_ta Y --pypy_compatible N
 
 (Remember: python -mpip install siglab_py)
 
+    Gold future contract specification: https://www.futunn.com/en/futures/GCMAIN-US/contract-specs
+
 This script is pypy compatible. Set "pypy_compatible" to True, in which case "compute_candles_stats" will skip calculation for TAs which requires: scipy, statsmodels, scikit-learn, sklearn.preprocessing
 pypy futu_candles_ta_to_csv.py --symbol HK.00700 --end_date "2025-03-11 0:0:0" --start_date "2024-03-11 0:0:0" --market HK --trdmarket HK --security_firm FUTUSECURITIES --security_type STOCK --compute_ta Y --pypy_compatible Y
 

siglab_py/market_data_providers/google_monitor.py (new file):
@@ -0,0 +1,320 @@
+import asyncio
+import sys
+import traceback
+import os
+import argparse
+import json
+import hashlib
+import re
+from datetime import datetime, timedelta, timezone
+import time
+import pytz
+import arrow
+from enum import Enum
+import logging
+import requests
+from typing import Dict, Optional, Set, Any, Union, List
+from redis import StrictRedis
+
+from siglab_py.util.notification_util import dispatch_notification
+
+current_filename = os.path.basename(__file__)
+
+'''
+google_monitor fetches messages from particular query. Then:
+    a. Save (and accumulate) messages to message cache file (No duplicates) for further analysis.
+        message_cache_file: str = f"google_search_messages.json"
+
+    b. If any of keywords in message_keywords_filter matches words in message (--message_keywords_filter):
+        - Publish to redis for strategy consumption, topic: param['mds']['topics']['google_alert']
+        - Dispatch slack alert
+        - If scripts runs on Windows, play a wav file (Feels free make modification play sounds on Ubuntu for example)
+
+Usage:
+    set PYTHONPATH=%PYTHONPATH%;D:\dev\siglab\siglab_py
+    python google_monitor.py --apikey xxx --search_engine_id yyy --query "site:twitter.com @user_id1 @user_id2 some topic" --slack_info_url=https://hooks.slack.com/services/xxx --slack_critial_url=https://hooks.slack.com/services/xxx --slack_alert_url=https://hooks.slack.com/services/xxx
+
+alert_wav_path
+    Point it to wav file for alert notification. It's using 'winsound', i.e. Windows only.
+    Set to None otherwise.
+
+Google API: https://console.cloud.google.com/apis/credentials?project=YOUR_PROJECT
+    name: YOUR_API_KEY_NAME
+    apikey: ?????
+
+Google Search Engine
+    To create
+        name: siglab_py_search: https://programmablesearchengine.google.com/controlpanel/create
+        <script async src="https://cse.google.com/cse.js?cx=YOUR_SEARCH_ENGINE_ID">
+        </script>
+        <div class="gcse-search"></div>
+    Then enable it: https://console.developers.google.com/apis/api/customsearch.googleapis.com/overview?project=?????
+
+launch.json for Debugging from VSCode:
+{
+    "version": "0.2.0",
+    "configurations": [
+        {
+            "name": "Python: Current File",
+            "type": "python",
+            "request": "launch",
+            "program": "${file}",
+            "console": "integratedTerminal",
+            "justMyCode": false,
+            "args" : [
+                "--apikey", "xxx",
+                "--search_engine_id", "yyy",
+                "--query", "site:twitter.com @user_id1 @user_id2 some topic",
+                "--slack_info_url", "https://hooks.slack.com/services/xxx",
+                "--slack_critial_url", "https://hooks.slack.com/services/xxx",
+                "--slack_alert_url", "https://hooks.slack.com/services/xxx",
+            ],
+        }
+    ]
+}
+'''
+
+param: Dict[str, Any] = {
+    'apikey': os.getenv('GOOGLE_APIKEY', 'xxx'),
+    'search_engine_id': os.getenv('GOOGLE_SEARCH_ENGINE_ID', 'xxx'),
+    'num_results' : 10,
+    'query' : '',
+    'alert_wav_path' : r"d:\sounds\terrible.wav",
+    "num_shouts" : 5, # How many times 'alert_wav_path' is played
+    "loop_freq_ms" : 1000*60*15, # Google allow max 100 calls per day free.
+    'current_filename' : current_filename,
+
+    'notification' : {
+        'footer' : None,
+
+        # slack webhook url's for notifications
+        'slack' : {
+            'info' : { 'webhook_url' : None },
+            'critical' : { 'webhook_url' : None },
+            'alert' : { 'webhook_url' : None },
+        }
+    },
+
+    'mds': {
+        'topics': {
+            'tg_alert': 'tg_alert'
+        },
+        'redis': {
+            'host': 'localhost',
+            'port': 6379,
+            'db': 0,
+            'ttl_ms': 1000 * 60 * 15
+        }
+    }
+}
+
+class LogLevel(Enum):
+    CRITICAL = 50
+    ERROR = 40
+    WARNING = 30
+    INFO = 20
+    DEBUG = 10
+    NOTSET = 0
+
+logging.Formatter.converter = time.gmtime
+logger: logging.Logger = logging.getLogger()
+log_level: int = logging.INFO
+logger.setLevel(log_level)
+format_str: str = '%(asctime)s %(message)s'
+formatter: logging.Formatter = logging.Formatter(format_str)
+sh: logging.StreamHandler = logging.StreamHandler()
+sh.setLevel(log_level)
+sh.setFormatter(formatter)
+logger.addHandler(sh)
+
+def log(message: str, log_level: LogLevel = LogLevel.INFO) -> None:
+    if log_level.value < LogLevel.WARNING.value:
+        logger.info(f"{datetime.now()} {message}")
+    elif log_level.value == LogLevel.WARNING.value:
+        logger.warning(f"{datetime.now()} {message}")
+    elif log_level.value == LogLevel.ERROR.value:
+        logger.error(f"{datetime.now()} {message}")
+
+def parse_args():
+    parser = argparse.ArgumentParser() # type: ignore
+    parser.add_argument("--apikey", help="API key", default=None)
+    parser.add_argument("--search_engine_id", help="Google search engine ID", default=None)
+    parser.add_argument("--num_results", help="Max number items to fetch", default=10)
+    parser.add_argument("--query", help="Query - what are you looking for?", default=None)
+    parser.add_argument("--slack_info_url", help="Slack webhook url for INFO", default=None)
+    parser.add_argument("--slack_critial_url", help="Slack webhook url for CRITICAL", default=None)
+    parser.add_argument("--slack_alert_url", help="Slack webhook url for ALERT", default=None)
+
+    args = parser.parse_args()
+
+    param['apikey'] = args.apikey
+    param['search_engine_id'] = args.search_engine_id
+    param['num_results'] = args.num_results
+    param['query'] = args.query
+
+    param['notification']['slack']['info']['webhook_url'] = args.slack_info_url
+    param['notification']['slack']['critical']['webhook_url'] = args.slack_critial_url
+    param['notification']['slack']['alert']['webhook_url'] = args.slack_alert_url
+
+    param['notification']['footer'] = f"From {param['current_filename']}"
+
+    print(f"Startup args: {args}") # Dont use logger, not yet setup yet.
+    print(f"param: {print(json.dumps(param, indent=2))}")
+
+def init_redis_client() -> StrictRedis:
+    redis_client : StrictRedis = StrictRedis(
+        host = param['mds']['redis']['host'],
+        port = param['mds']['redis']['port'],
+        db = 0,
+        ssl = False
+    )
+    try:
+        redis_client.keys()
+    except ConnectionError as redis_conn_error:
+        err_msg = f"Failed to connect to redis: {param['mds']['redis']['host']}, port: {param['mds']['redis']['port']}"
+        log(f"Failed to init redis connection. Will skip publishes to redis. {err_msg}")
+        redis_client = None # type: ignore
+
+    return redis_client
+
+def search_google_custom(query, api_key, search_engine_id, num_results=10):
+    url = 'https://www.googleapis.com/customsearch/v1'
+    params = {
+        'key': api_key,
+        'cx': search_engine_id,
+        'q': query,
+        'num': num_results,
+        'sort': 'date',
+        'dateRestrict': 'd1' # Restrict to most recent (adjust as needed: d1=day, m1=month, etc.)
+    }
+
+    response = requests.get(url, params=params)
+
+    if response.status_code == 200:
+        return response.json()
+    else:
+        log(f"Query error: {response.status_code} - {response.text}")
+        return None
+
+async def main() -> None:
+    parse_args()
+
+    message_cache_file: str = f"google_search_messages.json"
+    log(f"message_cache_file: {message_cache_file}")
+
+    notification_params : Dict[str, Any] = param['notification']
+
+    processed_messages : List[Dict[str, Any]] = []
+    seen_hashes : Set[str] = set()
+    if os.path.exists(message_cache_file):
+        with open(message_cache_file, 'r', encoding='utf-8') as f:
+            lines = f.readlines()
+            for line in lines:
+                message_data = json.loads(line)
+                message_hash: str = hashlib.sha256(message_data['message'].encode('utf-8')).hexdigest()
+
+                message_data['datetime'] = pytz.UTC.localize(arrow.get(message_data['datetime']).datetime.replace(tzinfo=None))
+
+                if message_hash not in seen_hashes:
+                    seen_hashes.add(message_hash)
+                    processed_messages.append(message_data)
+
+    processed_messages = sorted(processed_messages, key=lambda m: m['datetime'])
+
+    try:
+        redis_client: Optional[StrictRedis] = init_redis_client()
+    except Exception as redis_err:
+        redis_client = None
+        log(f"Failed to connect to redis. Still run but not publishing to it. {redis_err}")
+
+    while True:
+        try:
+            results = search_google_custom(param['query'], param['apikey'], param['search_engine_id'], param['num_results'])
+
+            if results:
+                if 'items' in results:
+                    for item in results['items']:
+                        title = item.get('title', 'No title')
+                        snippet = item.get('snippet', 'No snippet')
+                        link = item.get('link', 'No link')
+                        published_date = item.get('pagemap', {}).get('metatags', [{}])[0].get('article:published_time', 'No date')
+
+                        dt_message = datetime.now()
+                        pattern = r'^\d+\s*(?:days?|day?|hours?|hour?|minutes?|minute?|seconds?|second?|h|m|s)\s*(?:ago)?.*?([A-Za-z]+\s+\d+,\s+\d{4},\s+\d+:\d+\s+[AP]M\s+ET)'
+                        match = re.match(pattern, snippet)
+                        if published_date == 'No date' and match:
+                            published_date = match.group(1)
+                            dt_message = datetime.strptime(published_date, '%b %d, %Y, %I:%M %p ET')
+
+                        snippet = re.sub(pattern, '', snippet).strip()
+
+                        timestamp_ms = int(dt_message.timestamp() * 1000)
+                        message_data: Dict[str, Any] = {
+                            "timestamp_ms": timestamp_ms,
+                            "datetime": dt_message.isoformat(), # Always in UTC
+                            "title" : title,
+                            "message": snippet,
+                            "url" : link
+                        }
+                        json_str: str = json.dumps(message_data, ensure_ascii=False, sort_keys=True)
+                        message_hash: str = hashlib.sha256(snippet.encode('utf-8')).hexdigest()
+                        if (message_hash not in seen_hashes):
+                            seen_hashes.add(message_hash)
+                            processed_messages.append(message_data)
+
+                            log(f"{message_data}")
+
+                            dispatch_notification(title=f"{param['current_filename']} Incoming! {title}", message=message_data, footer=param['notification']['footer'], params=notification_params, log_level=LogLevel.CRITICAL, logger=logger) # type: ignore
+
+                            with open(message_cache_file, 'a', encoding='utf-8') as f:
+                                json.dump(message_data, f, ensure_ascii=False)
+                                f.write('\n')
+
+                            if param['alert_wav_path']and sys.platform == 'win32':
+                                import winsound
+                                for _ in range(param['num_shouts']):
+                                    winsound.PlaySound(param['alert_wav_path'], winsound.SND_FILENAME)
+
+                            if redis_client:
+                                try:
+                                    publish_topic = f"google_search"
+                                    redis_client.publish(publish_topic, json_str)
+                                    redis_client.setex(message_hash, param['mds']['redis']['ttl_ms'] // 1000, json_str)
+                                    log(f"Published message {json_str} to Redis topic {publish_topic}", LogLevel.INFO)
+                                except Exception as e:
+                                    log(f"Failed to publish to Redis: {str(e)}", LogLevel.ERROR)
+
+            await asyncio.sleep(int(param['loop_freq_ms'] / 1000))
+
+            if processed_messages:
+                oldest_message: Dict[str, Any] = min(processed_messages, key=lambda x: x['timestamp_ms'])
+                newest_message: Dict[str, Any] = max(processed_messages, key=lambda x: x['timestamp_ms'])
+                log(
+                    json.dumps(
+                        {
+                            'num_messages': len(processed_messages),
+                            'oldest': {
+                                'timestamp_ms': oldest_message['timestamp_ms'],
+                                'datetime': datetime.fromtimestamp(int(oldest_message['timestamp_ms']/1000),tz=timezone.utc).isoformat()
+                            },
+                            'latest': {
+                                'timestamp_ms': newest_message['timestamp_ms'],
+                                'datetime': datetime.fromtimestamp(int(newest_message['timestamp_ms']/1000),tz=timezone.utc).isoformat()
+                            }
+                        }, indent=2
+                    ),
+                    LogLevel.INFO
+                )
+
+        except Exception as e:
+            log(f"Oops {str(e)} {str(sys.exc_info()[0])} {str(sys.exc_info()[1])} {traceback.format_exc()}", LogLevel.ERROR)
+        finally:
+            await asyncio.sleep(int(param['loop_freq_ms'] / 1000))
+
+if __name__ == '__main__':
+    try:
+        asyncio.run(main())
+    except KeyboardInterrupt:
+        log("Stopped by user", LogLevel.INFO)
+    except Exception as e:
+        log(f"Unexpected error: {str(e)}", LogLevel.ERROR)
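
For downstream consumers: google_monitor.py publishes each new match to the Redis channel "google_search" and also caches it under its SHA-256 hash via setex. A minimal consumer sketch follows; the channel name and the message fields (timestamp_ms, datetime, title, message, url) come from the file above, while the host/port and the rest are illustrative assumptions.

    import json
    from redis import StrictRedis

    redis_client = StrictRedis(host="localhost", port=6379, db=0)
    pubsub = redis_client.pubsub()
    pubsub.subscribe("google_search")   # topic published by google_monitor.py

    for message in pubsub.listen():
        if message["type"] != "message":
            continue                    # skip subscribe confirmations
        alert = json.loads(message["data"])
        print(alert["datetime"], alert["title"], alert["url"])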

siglab_py/market_data_providers/orderbooks_provider.py:
@@ -20,6 +20,8 @@ from redis.client import PubSub
 from ccxt.base.exchange import Exchange
 import ccxt.pro as ccxtpro
 
+from siglab_py.util.market_data_util import async_instantiate_exchange
+
 '''
 To start from command prompt:
     set PYTHONPATH=%PYTHONPATH%;D:\dev\siglab
@@ -28,7 +30,7 @@ To start from command prompt:
 This script is pypy compatible.
 
 Key parameters you may want to modify:
-    provider_id: You can trigger this provider instance using
+    provider_id: You can trigger this provider instance using trigger_provider.py. Of course, you'd write your own.
     instance_capacity: max # tickers this provider instance will handle.
     ts_delta_observation_ms_threshold: default to 150ms. "Observation Delta" is clock diff between orderbook timestamp, and your local server clock.
     ts_delta_consecutive_ms_threshold: default to 150ms. "Consecutive Delta" is time elapsed between consecutive orderbook updates.
@@ -126,16 +128,17 @@ async def instantiate_exhange(
 ) -> Exchange:
     if old_exchange:
         await old_exchange.close() # type: ignore Otherwise, Error: Cannot access attribute "close" for class "Exchange Attribute "close" is unknown
-
-
-
-
-
-
-
-
+    _exchange_name = exchange_name.split('_')[0]
+
+    exchange = await async_instantiate_exchange(
+        gateway_id = _exchange_name,
+        default_type = market_type,
+        api_key=None, # type: ignore
+        secret=None, # type: ignore
+        passphrase=None # type: ignore
+    )
     exchange.name = exchange_name # type: ignore Otherwise, Error: Cannot assign to attribute "name" for class "binance" "str" is not assignable to "None"
-    return exchange
+    return exchange # type: ignore
 
 def log(message : str, log_level : LogLevel = LogLevel.INFO):
     if log_level.value<LogLevel.WARNING.value:
@@ -259,7 +262,7 @@ class OrderBook:
         }
 
         data['best_ask'] = min(data['asks'])
-        data['best_bid'] =
+        data['best_bid'] = max(data['bids'])
         return data
 
 class ThreadTask:
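
The completed line mirrors the best_ask line above it: with bids and asks keyed by price, the inside market is simply the maximum bid and the minimum ask. A tiny worked sketch, assuming price-keyed mappings as implied by min(data['asks']):

    data = {
        "bids": {99.5: 2.0, 99.4: 1.5, 99.3: 3.0},   # price -> size (assumed shape)
        "asks": {99.7: 1.0, 99.8: 2.5, 99.9: 0.7},
    }
    data["best_ask"] = min(data["asks"])   # 99.7
    data["best_bid"] = max(data["bids"])   # 99.5
    assert data["best_bid"] < data["best_ask"]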
@@ -348,7 +351,7 @@ async def main():
     candles_publish_topic : str = param['mds']['topics']['candles_publish_topic']
     redis_pubsub : PubSub = init_redis_channel_subscription(redis_client, partition_assign_topic)
 
-    log(f"orderbooks_provider {param['provider_id']} started, waiting for trigger. (Can use
+    log(f"orderbooks_provider {param['provider_id']} started, waiting for trigger. (Can use trigger_provider.py to trigger it)")
 
     tasks = []
     for message in redis_pubsub.listen():