siglab-py 0.1.19__py3-none-any.whl → 0.6.33__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- siglab_py/algo/__init__.py +0 -0
- siglab_py/algo/macdrsi_crosses_15m_tc_strategy.py +107 -0
- siglab_py/algo/strategy_base.py +122 -0
- siglab_py/algo/strategy_executor.py +1308 -0
- siglab_py/algo/tp_algo.py +529 -0
- siglab_py/backtests/__init__.py +0 -0
- siglab_py/backtests/backtest_core.py +2405 -0
- siglab_py/backtests/coinflip_15m_crypto.py +432 -0
- siglab_py/backtests/fibonacci_d_mv_crypto.py +541 -0
- siglab_py/backtests/macdrsi_crosses_15m_tc_crypto.py +473 -0
- siglab_py/constants.py +26 -1
- siglab_py/exchanges/binance.py +38 -0
- siglab_py/exchanges/deribit.py +83 -0
- siglab_py/exchanges/futubull.py +33 -3
- siglab_py/market_data_providers/candles_provider.py +11 -10
- siglab_py/market_data_providers/candles_ta_provider.py +5 -5
- siglab_py/market_data_providers/ccxt_candles_ta_to_csv.py +238 -0
- siglab_py/market_data_providers/futu_candles_ta_to_csv.py +224 -0
- siglab_py/market_data_providers/google_monitor.py +320 -0
- siglab_py/market_data_providers/orderbooks_provider.py +15 -12
- siglab_py/market_data_providers/tg_monitor.py +428 -0
- siglab_py/market_data_providers/{test_provider.py → trigger_provider.py} +9 -8
- siglab_py/ordergateway/client.py +172 -41
- siglab_py/ordergateway/encrypt_keys_util.py +1 -1
- siglab_py/ordergateway/gateway.py +456 -344
- siglab_py/ordergateway/test_ordergateway.py +8 -7
- siglab_py/tests/integration/market_data_util_tests.py +80 -6
- siglab_py/tests/unit/analytic_util_tests.py +67 -4
- siglab_py/tests/unit/market_data_util_tests.py +96 -0
- siglab_py/tests/unit/simple_math_tests.py +252 -0
- siglab_py/tests/unit/trading_util_tests.py +65 -0
- siglab_py/util/analytic_util.py +484 -66
- siglab_py/util/datetime_util.py +39 -0
- siglab_py/util/market_data_util.py +564 -74
- siglab_py/util/module_util.py +40 -0
- siglab_py/util/notification_util.py +78 -0
- siglab_py/util/retry_util.py +16 -3
- siglab_py/util/simple_math.py +262 -0
- siglab_py/util/slack_notification_util.py +59 -0
- siglab_py/util/trading_util.py +118 -0
- {siglab_py-0.1.19.dist-info → siglab_py-0.6.33.dist-info}/METADATA +5 -13
- siglab_py-0.6.33.dist-info/RECORD +56 -0
- {siglab_py-0.1.19.dist-info → siglab_py-0.6.33.dist-info}/WHEEL +1 -1
- siglab_py-0.1.19.dist-info/RECORD +0 -31
- {siglab_py-0.1.19.dist-info → siglab_py-0.6.33.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,428 @@
|
|
|
1
|
+
import asyncio
|
|
2
|
+
import sys
|
|
3
|
+
import traceback
|
|
4
|
+
import os
|
|
5
|
+
import argparse
|
|
6
|
+
import json
|
|
7
|
+
import hashlib
|
|
8
|
+
import re
|
|
9
|
+
from datetime import datetime, timedelta, timezone
|
|
10
|
+
import time
|
|
11
|
+
import pytz
|
|
12
|
+
import arrow
|
|
13
|
+
from enum import Enum
|
|
14
|
+
import logging
|
|
15
|
+
from typing import Dict, Optional, Set, Any, Union, List
|
|
16
|
+
from telethon.sync import TelegramClient
|
|
17
|
+
from telethon.errors import SessionPasswordNeededError, FloodWaitError
|
|
18
|
+
from telethon.types import Message
|
|
19
|
+
from redis import StrictRedis
|
|
20
|
+
|
|
21
|
+
from siglab_py.util.notification_util import dispatch_notification
|
|
22
|
+
|
|
23
|
+
current_filename = os.path.basename(__file__)
|
|
24
|
+
|
|
25
|
+
'''
|
|
26
|
+
tg_monitor fetches messages from particular TG channel (--channel_username). Then:
|
|
27
|
+
a. Save (and accumulate) messages to message cache file (No duplicates) for further analysis.
|
|
28
|
+
message_cache_file: str = f"{param['channel_username'].lstrip('@')}_messages.json"
|
|
29
|
+
Note, only messages from senders in param['users_filter'] will be included.
|
|
30
|
+
|
|
31
|
+
b. If any of keywords in message_keywords_filter matches words in message (--message_keywords_filter):
|
|
32
|
+
- Publish to redis for strategy consumption, topic: param['mds']['topics']['tg_alert']
|
|
33
|
+
- Dispatch slack alert
|
|
34
|
+
If the script runs on Windows, play a wav file (feel free to modify this to play sounds on Ubuntu, for example)
|
|
35
|
+
|
|
36
|
+
Usage:
|
|
37
|
+
set PYTHONPATH=%PYTHONPATH%;D:\dev\siglab\siglab_py
|
|
38
|
+
python tg_monitor.py --api_id xxx --api_hash yyy --phone +XXXYYYYYYYY --channel_username @zZz --users_filter "yYy,zZz" --start_date 2025-03-01 --message_keywords_filter "trump,trade war,tariff" --slack_info_url=https://hooks.slack.com/services/xxx --slack_critial_url=https://hooks.slack.com/services/xxx --slack_alert_url=https://hooks.slack.com/services/xxx
|
|
39
|
+
|
|
40
|
+
api_id and api_hash
|
|
41
|
+
Go to https://my.telegram.org/
|
|
42
|
+
It's under "API development tools"
|
|
43
|
+
|
|
44
|
+
phone
|
|
45
|
+
Format: +XXXYYYYYYYY where XXX is country/area code.
|
|
46
|
+
|
|
47
|
+
channel_username and users_filter
|
|
48
|
+
You decide which TG channel, and under that channel what users you're tracking. TG is noisy.
|
|
49
|
+
|
|
50
|
+
start_date
|
|
51
|
+
tg_monitor relies on TelegramClient.get_messages: https://docs.telethon.dev/en/stable/modules/client.html#telethon.client.messages.MessageMethods.get_messages
|
|
52
|
+
As with most API, they generally limit number of entries you can fetch in one go (~100).
|
|
53
|
+
tg_monitor implemented sliding window technique with cutoff = start_date for the first fetch.
|
|
54
|
+
Note, if you increment your sliding window too quickly, you'd miss some messages. If you scan too slowly, it'd take forever. It's a balancing act.
|
|
55
|
+
Atm, here's the general logic for how we move the sliding window forward:
|
|
56
|
+
* If a fetch returns zero messages that match param['users_filter'], we move the window forward by an hour.
|
|
57
|
+
* Otherwise, we move the window by adding 5 minutes to timestamp of last message fetched (whether matches users_filter or not) just fetched.
|
|
58
|
+
A decent specification of 'users_filter' will help minimize chances of missed messages.
|
|
59
|
+
Default: None, cutoff date will be set to tm1 in this case if message_cache_file NOT exists.
|
|
60
|
+
Otherwise, it'd default to latest message's datetime, plus a couple minute.
|
|
61
|
+
Set to 'yyyy-MM-dd' if you want to collect more history for analysis.
|
|
62
|
+
Generally, TG allows you fetch around three months history. Anything more, get_messages will return empty array.
|
|
63
|
+
|
|
64
|
+
Regardless, messages you collected is accumulated in 'message_cache_file'.
|
|
65
|
+
|
|
66
|
+
message_keywords_filter
|
|
67
|
+
TG message_keywords_filter: Comma separated list, case-insensitive. Default: None (i.e. no keywords)
|
|
68
|
+
Example, --message_keywords_filter "exploit, attack, hack, breach, compromise, stolen, leak, security incident, phishing, social engineer, withdrawals freeze, frozen"
|
|
69
|
+
|
|
70
|
+
We play a sound, publish to redis, etc., only if the message contains any of the keywords in message_keywords_filter.
|
|
71
|
+
Quote around comma separated list.
|
|
72
|
+
|
|
73
|
+
alert_wav_path
|
|
74
|
+
Point it to wav file for alert notification. It's using 'winsound', i.e. Windows only.
|
|
75
|
+
Set to None otherwise.
|
|
76
|
+
|
|
77
|
+
launch.json for Debugging from VSCode:
|
|
78
|
+
{
|
|
79
|
+
"version": "0.2.0",
|
|
80
|
+
"configurations": [
|
|
81
|
+
{
|
|
82
|
+
"name": "Python: Current File",
|
|
83
|
+
"type": "python",
|
|
84
|
+
"request": "launch",
|
|
85
|
+
"program": "${file}",
|
|
86
|
+
"console": "integratedTerminal",
|
|
87
|
+
"justMyCode": false,
|
|
88
|
+
"args" : [
|
|
89
|
+
"--api_id", "xxx",
|
|
90
|
+
"--api_hash", "xxx",
|
|
91
|
+
"--phone", "+XXXYYYYYYYY",
|
|
92
|
+
"--channel_username", "@SomeChannel",
|
|
93
|
+
"--users_filter", "SomeBody",
|
|
94
|
+
"--start_date", "2025-03-01",
|
|
95
|
+
"--message_keywords_filter", "exploit, attack, hack, breach, compromise, stolen, leak, security incident, phishing, social engineer, withdrawals freeze, frozen",
|
|
96
|
+
"--slack_info_url", "https://hooks.slack.com/services/xxx",
|
|
97
|
+
"--slack_critial_url", "https://hooks.slack.com/services/xxx",
|
|
98
|
+
"--slack_alert_url", "https://hooks.slack.com/services/xxx",
|
|
99
|
+
],
|
|
100
|
+
}
|
|
101
|
+
]
|
|
102
|
+
}
|
|
103
|
+
|
|
104
|
+
https://norman-lm-fung.medium.com/monitoring-telegram-channel-tg-monitor-from-siglab-py-f7ec30c2c32e
|
|
105
|
+
'''
|
|
106
|
+
|
|
107
|
+
# Runtime configuration. parse_args() overwrites most of these defaults from
# the command line; env vars provide the fallbacks for the Telegram credentials.
param: Dict[str, Any] = {
    # Telegram API credentials — from https://my.telegram.org ("API development tools").
    'api_id': os.getenv('TELEGRAM_API_ID', 'xxx'),
    'api_hash': os.getenv('TELEGRAM_API_HASH', 'xxx'),
    # Phone tied to the TG account, format +XXXYYYYYYYY (XXX = country/area code).
    'phone': os.getenv('TELEGRAM_PHONE', '+XXXYYYYYYYY'),
    # Channel to monitor; overwritten by --channel_username.
    'channel_username': '@SomeChannel',
    # Lowercased sender usernames to keep; None means parse_args must supply it.
    'users_filter' : None,
    # Keywords (case-insensitive) that trigger alerts; empty list = no keyword alerts.
    'message_keywords_filter': [],
    # 'yyyy-MM-dd' cutoff for the first fetch; None = derive from cache / tm1.
    'start_date': None,
    # wav played on keyword hit (Windows/winsound only). Set to None elsewhere.
    'alert_wav_path' : r"d:\sounds\terrible.wav",
    "num_shouts" : 5, # How many times 'alert_wav_path' is played
    # Sleep between fetch iterations, in milliseconds.
    "loop_freq_ms" : 1000,
    'current_filename' : current_filename,

    'notification' : {
        # Footer appended to every dispatched notification; set in parse_args.
        'footer' : None,

        # slack webhook url's for notifications
        'slack' : {
            'info' : { 'webhook_url' : None },
            'critical' : { 'webhook_url' : None },
            'alert' : { 'webhook_url' : None },
        }
    },

    # Market-data-service plumbing: redis connection + publish topic.
    'mds': {
        'topics': {
            'tg_alert': 'tg_alert'
        },
        'redis': {
            'host': 'localhost',
            'port': 6379,
            'db': 0,
            # Keyword-hit messages are cached in redis with this TTL (15 min).
            'ttl_ms': 1000 * 60 * 15
        }
    }
}
|
|
143
|
+
|
|
144
|
+
class LogLevel(Enum):
    """Severity levels for log(); numeric values mirror the stdlib ``logging`` levels."""
    CRITICAL = logging.CRITICAL  # 50
    ERROR = logging.ERROR        # 40
    WARNING = logging.WARNING    # 30
    INFO = logging.INFO          # 20
    DEBUG = logging.DEBUG        # 10
    NOTSET = logging.NOTSET      # 0
|
|
151
|
+
|
|
152
|
+
# Root-logger setup: UTC timestamps, INFO level, single stream handler.
logging.Formatter.converter = time.gmtime  # emit asctime in UTC, not local time

log_level: int = logging.INFO
format_str: str = '%(asctime)s %(message)s'

formatter: logging.Formatter = logging.Formatter(format_str)
sh: logging.StreamHandler = logging.StreamHandler()
sh.setFormatter(formatter)
sh.setLevel(log_level)

logger: logging.Logger = logging.getLogger()
logger.setLevel(log_level)
logger.addHandler(sh)
|
|
162
|
+
|
|
163
|
+
def log(message: str, log_level: LogLevel = LogLevel.INFO) -> None:
    """Log *message* through the module-level root logger at *log_level*.

    Bug fix: the original if/elif chain had no branch for levels above
    ERROR, so CRITICAL messages were silently dropped. They now route to
    logger.critical().
    """
    # The formatter already prepends %(asctime)s; this extra datetime.now()
    # prefix is kept for output compatibility with the rest of the package.
    stamped: str = f"{datetime.now()} {message}"
    if log_level.value < LogLevel.WARNING.value:
        logger.info(stamped)
    elif log_level.value == LogLevel.WARNING.value:
        logger.warning(stamped)
    elif log_level.value == LogLevel.ERROR.value:
        logger.error(stamped)
    else:
        # CRITICAL (or any custom level above ERROR) — previously unreachable.
        logger.critical(stamped)
|
|
170
|
+
|
|
171
|
+
def parse_args():
    """Parse command-line arguments into the module-level ``param`` dict.

    Mutates ``param`` in place (credentials, channel, filters, slack webhook
    urls, notification footer) and echoes the resulting config via print()
    because the logger is configured later.
    """
    parser = argparse.ArgumentParser() # type: ignore
    parser.add_argument("--api_id", help="TG api_id", default=None)
    parser.add_argument("--api_hash", help="TG api_hash", default=None)
    parser.add_argument("--phone", help="G Phone tied to TG. Format: +XXXYYYYYYYY where XXX is country/area code.", default=None)
    parser.add_argument("--channel_username", help="TG channel_username", default=None)
    parser.add_argument("--users_filter", help="Comma separated list of TG user names", default=None)
    parser.add_argument("--message_keywords_filter", help="TG message_keywords_filter: Comma separated list, case-insensitive. Default: None (i.e. no keywords)", default=None)
    parser.add_argument("--start_date", help="start_date, format: yyyy-MM-dd. If left to null, cutoff date default to last message's datetime from message cache, or tm1.", default=None)

    parser.add_argument("--slack_info_url", help="Slack webhook url for INFO", default=None)
    parser.add_argument("--slack_critial_url", help="Slack webhook url for CRITICAL", default=None)
    parser.add_argument("--slack_alert_url", help="Slack webhook url for ALERT", default=None)

    args = parser.parse_args()

    param['api_id'] = args.api_id
    param['api_hash'] = args.api_hash
    # Bug fix: --phone was parsed but never stored, so client.start() always
    # used the TELEGRAM_PHONE env default. Only override when supplied so the
    # env fallback still works.
    if args.phone:
        param['phone'] = args.phone
    param['channel_username'] = args.channel_username
    if args.users_filter:
        # Normalized to lowercase/stripped; matched against sender usernames later.
        param['users_filter'] = [ user.lower().strip() for user in args.users_filter.split(',') ]
    param['start_date'] = args.start_date

    if args.message_keywords_filter:
        # Kept raw here; matching strips/lowercases each keyword at use site.
        param['message_keywords_filter'] = args.message_keywords_filter.split(',')

    param['notification']['slack']['info']['webhook_url'] = args.slack_info_url
    param['notification']['slack']['critical']['webhook_url'] = args.slack_critial_url
    param['notification']['slack']['alert']['webhook_url'] = args.slack_alert_url

    param['notification']['footer'] = f"From {param['current_filename']} {param['channel_username'].lstrip('@')}"

    print(f"Startup args: {args}") # Dont use logger, not yet setup yet.
    # Bug fix: the original nested print(f"param: {print(json.dumps(...))}")
    # printed the JSON followed by "param: None" (inner print returns None).
    print(f"param: {json.dumps(param, indent=2)}")
|
|
205
|
+
|
|
206
|
+
def init_redis_client() -> Optional[StrictRedis]:
    """Build a StrictRedis client from param['mds']['redis'] and verify connectivity.

    Returns the client, or None when redis is unreachable (the monitor then
    runs without publishing). Return annotation corrected to Optional since
    the failure path deliberately returns None.
    """
    redis_client : StrictRedis = StrictRedis(
        host = param['mds']['redis']['host'],
        port = param['mds']['redis']['port'],
        db = 0,
        ssl = False
    )
    try:
        # StrictRedis connects lazily; issue a real command to force the handshake.
        redis_client.keys()
    except Exception as redis_conn_error:
        # Bug fix: redis-py's ConnectionError subclasses redis.RedisError, NOT
        # the builtin ConnectionError, so the original except clause never
        # fired and connection failures escaped to the caller. Catch broadly
        # here so the documented "skip publishes" fallback actually happens.
        err_msg = f"Failed to connect to redis: {param['mds']['redis']['host']}, port: {param['mds']['redis']['port']}"
        log(f"Failed to init redis connection. Will skip publishes to redis. {err_msg} {redis_conn_error}")
        redis_client = None # type: ignore

    return redis_client
|
|
221
|
+
|
|
222
|
+
async def main() -> None:
    """Poll a Telegram channel, cache matching messages, and alert on keyword hits.

    Flow:
      1. Parse CLI args; load the JSON-lines message cache (dedup via sha256
         of each record) and derive the initial fetch cutoff.
      2. Connect to redis (best effort) and authorize the Telegram client.
      3. Loop forever: fetch up to 100 messages per filtered user with a
         sliding offset_date window, persist new ones to the cache, and
         dispatch notifications / redis publishes on keyword matches.
    """
    parse_args()

    # Session + cache filenames derive from the channel name, so one process
    # per channel can run side by side without clashing.
    session_file: str = f"{param['channel_username'].lstrip('@')}_session"
    message_cache_file: str = f"{param['channel_username'].lstrip('@')}_messages.json"
    log(f"session_file: {session_file}")
    log(f"message_cache_file: {message_cache_file}")

    notification_params : Dict[str, Any] = param['notification']

    # tm1 = midnight of yesterday (built from local date components), then
    # converted to UTC. Used as the default cutoff and "recent message" bound.
    tm1 : datetime = datetime(datetime.now().year, datetime.now().month, datetime.now().day) + timedelta(days=-1)
    tm1 = tm1.astimezone(pytz.UTC)

    last_message_date: datetime = tm1
    processed_messages : List[Dict[str, Any]] = []
    seen_hashes : Set[str] = set()
    # Reload the JSON-lines cache. Each raw record is hashed BEFORE its
    # 'datetime' field is parsed, so cache hashes match the hashes computed
    # for freshly fetched messages below.
    if os.path.exists(message_cache_file):
        with open(message_cache_file, 'r', encoding='utf-8') as f:
            lines = f.readlines()
            for line in lines:
                message_data = json.loads(line)

                # json.dumps before converting datetime to type(datetime)
                json_str: str = json.dumps(message_data, ensure_ascii=False, sort_keys=True)
                message_hash: str = hashlib.sha256(json_str.encode('utf-8')).hexdigest()

                # Strip any tz info from the cached ISO string, then re-tag as UTC.
                message_data['datetime'] = pytz.UTC.localize(arrow.get(message_data['datetime']).datetime.replace(tzinfo=None))

                if message_hash not in seen_hashes:
                    seen_hashes.add(message_hash)
                    processed_messages.append(message_data)

        # Resume fetching from the newest cached message.
        processed_messages = sorted(processed_messages, key=lambda m: m['datetime'])
        last_message_date = processed_messages[-1]['datetime']

    try:
        # May return None (unreachable redis) — publishes are skipped then.
        redis_client: Optional[StrictRedis] = init_redis_client()
    except Exception as redis_err:
        redis_client = None
        log(f"Failed to connect to redis. Still run but not publishing to it. {redis_err}")

    start_date: Optional[datetime] = None
    if param.get('start_date'):
        try:
            start_date = datetime.strptime(param['start_date'], '%Y-%m-%d').replace(tzinfo=pytz.UTC)
            log(f"Fetching messages from {param['start_date']} onward", LogLevel.INFO)
        except ValueError as e:
            log(f"Invalid start_date format: {str(e)}. Defaulting to current time.", LogLevel.WARNING)

    # Explicit --start_date wins; otherwise start just after the cache tail.
    offset_date : datetime = start_date if start_date else last_message_date + timedelta(minutes=1)

    async with TelegramClient(session_file, param['api_id'], param['api_hash']) as client:
        try:
            if not await client.is_user_authorized():
                try:
                    await client.start(phone=param['phone'])
                except SessionPasswordNeededError:
                    # 2FA-enabled accounts need an interactive password prompt.
                    password: str = input("Two-factor authentication enabled. Enter your password: ")
                    await client.start(phone=param['phone'], password=password)
                except FloodWaitError as e:
                    log(f"Flood wait error: Please wait {e.seconds} seconds", LogLevel.ERROR)
                    return
                except Exception as e:
                    log(f"Authorization failed: {str(e)}", LogLevel.ERROR)
                    return
            try:
                channel: Any = await client.get_entity(param['channel_username'])
                log(f"Connected to channel: {channel.title}", LogLevel.INFO)
            except Exception as e:
                log(f"Failed to access channel {param['channel_username']}: {str(e)}", LogLevel.ERROR)
                return

            # 'last_message_date' is the sliding-window cutoff from here on.
            last_message_date: datetime = offset_date
            oldest_message, newest_message = None, None # type: ignore
            while True:
                tm1 = datetime(datetime.now().year, datetime.now().month, datetime.now().day) + timedelta(days=-1)
                tm1 = tm1.astimezone(pytz.UTC)

                messages = []
                # NOTE(review): param['users_filter'] defaults to None; this loop
                # assumes --users_filter was supplied — confirm, else TypeError.
                for username in param['users_filter']:
                    _messages = await client.get_messages(
                        channel,
                        limit=100,
                        from_user=username,
                        offset_date=last_message_date # offset_date is the cutoff
                    )
                    messages = messages + _messages
                log(f"Fetched {len(messages)} raw messages with offset_date={last_message_date.isoformat()}", LogLevel.INFO)

                '''
                Sliding Window: way we increment 'last_message_date' (The cutoff), it's possible we miss some messages.
                However, if we're moving the sliding window too slowly, it'd take forever to scan.
                And if you hit their API too frequently:
                    Sleeping for 20s (0:00:20) on GetHistoryRequest flood wait
                TG won't ban your account, but still you'd need to wait.
                Adjust how fast you increment below to suit your purpose.
                '''
                # Never advance the cutoff past "now minus 3 minutes".
                realtime_cutoff = (datetime.now() + timedelta(minutes=-3)).astimezone(pytz.UTC)
                relevant_messages = [ msg for msg in messages if (msg.sender.username.lower().strip() if msg.sender and msg.sender.username else str(msg.sender_id)) in param['users_filter'] ]
                if not relevant_messages:
                    # No matching senders: jump the window forward a full hour.
                    last_message_date = last_message_date + timedelta(hours=1)
                    if last_message_date>realtime_cutoff:
                        last_message_date = realtime_cutoff
                    continue
                else:
                    sorted_messages = sorted(messages, key=lambda m: m.date)
                    # Advance to (newest fetched + 5 min), or an hour if that
                    # would not move the window forward.
                    last_message_date = sorted_messages[-1].date + timedelta(minutes=5) if sorted_messages[-1].date + timedelta(minutes=5)>last_message_date else last_message_date + timedelta(hours=1)
                    if last_message_date>realtime_cutoff:
                        last_message_date = realtime_cutoff

                    for message in sorted_messages: # Process oldest to newest
                        if not isinstance(message, Message):
                            continue
                        sender = await message.get_sender() # type: ignore
                        sender_name: Union[str, int] = sender.username if sender and sender.username else message.sender_id # type: ignore
                        sender_name = str(sender_name).lower().strip()
                        message_date: datetime = message.date # type: ignore
                        if message_date.tzinfo is None:
                            message_date = pytz.UTC.localize(message_date)
                        else:
                            message_date = message_date.astimezone(pytz.UTC)

                        # Sanitize: keep only alphanumerics/whitespace/.!?, drop commas.
                        message_text: str = message.message or ""
                        message_text = re.sub(r'[^a-zA-Z0-9\s.!?]', '', message_text)
                        message_text = message_text.replace(',', '')

                        message_data: Dict[str, Any] = {
                            "timestamp_ms": int(message_date.timestamp() * 1000),
                            "datetime": message_date.isoformat(), # Always in UTC
                            "sender": sender_name,
                            "message": message_text
                        }

                        # Same canonical-JSON hash scheme as the cache reload above.
                        json_str: str = json.dumps(message_data, ensure_ascii=False, sort_keys=True)
                        message_hash: str = hashlib.sha256(json_str.encode('utf-8')).hexdigest()

                        if (
                            (
                                not param['users_filter']
                                or (param['users_filter'] and sender_name in param['users_filter'])
                            )
                            and message_hash not in seen_hashes
                        ):
                            seen_hashes.add(message_hash)
                            processed_messages.append(message_data)

                            # Only messages newer than yesterday trigger notifications.
                            if message_date>tm1:
                                dispatch_notification(title=f"{param['current_filename']} {param['channel_username']} Incoming!", message=message_data, footer=param['notification']['footer'], params=notification_params, log_level=LogLevel.CRITICAL, logger=logger) # type: ignore

                            # Append-only JSON-lines cache; dedup happens via seen_hashes.
                            with open(message_cache_file, 'a', encoding='utf-8') as f:
                                json.dump(message_data, f, ensure_ascii=False)
                                f.write('\n')

                            if (
                                param['message_keywords_filter']
                                and any(keyword.lower().strip() in message_text.lower() for keyword in param['message_keywords_filter'])
                            ):
                                # Audible alert is Windows-only (winsound).
                                if param['alert_wav_path'] and message_date>=tm1 and sys.platform == 'win32':
                                    import winsound
                                    for _ in range(param['num_shouts']):
                                        winsound.PlaySound(param['alert_wav_path'], winsound.SND_FILENAME)
                                log(f"Incoming! {message_data}")

                                if redis_client:
                                    try:
                                        # Per-message topic so consumers can key on message id.
                                        publish_topic = f"{param['mds']['topics']['tg_alert']}_{message.id}"
                                        redis_client.publish(publish_topic, json_str)
                                        redis_client.setex(message_hash, param['mds']['redis']['ttl_ms'] // 1000, json_str)
                                        log(f"Published message {message.id} to Redis topic {publish_topic}", LogLevel.INFO)
                                    except Exception as e:
                                        log(f"Failed to publish to Redis: {str(e)}", LogLevel.ERROR)

                # Per-iteration summary of the accumulated cache.
                if processed_messages:
                    oldest_message: Dict[str, Any] = min(processed_messages, key=lambda x: x['timestamp_ms'])
                    newest_message: Dict[str, Any] = max(processed_messages, key=lambda x: x['timestamp_ms'])
                    log(
                        json.dumps(
                            {
                                'num_messages': len(processed_messages),
                                'oldest': {
                                    'timestamp_ms': oldest_message['timestamp_ms'],
                                    'datetime': datetime.fromtimestamp(int(oldest_message['timestamp_ms']/1000),tz=timezone.utc).isoformat()
                                },
                                'latest': {
                                    'timestamp_ms': newest_message['timestamp_ms'],
                                    'datetime': datetime.fromtimestamp(int(newest_message['timestamp_ms']/1000),tz=timezone.utc).isoformat()
                                }
                            }, indent=2
                        ),
                        LogLevel.INFO
                    )
                else:
                    log(f"No messages processed in this iteration. last_message_date: {last_message_date}", LogLevel.INFO)
                    # NOTE(review): +1 day here can overshoot realtime_cutoff until
                    # the next iteration clamps it — confirm intended.
                    last_message_date = last_message_date + timedelta(days=1)

                await asyncio.sleep(int(param['loop_freq_ms'] / 1000))

        except Exception as e:
            # Catch-all for the polling loop: log with full traceback and exit.
            log(f"Oops {str(e)} {str(sys.exc_info()[0])} {str(sys.exc_info()[1])} {traceback.format_exc()}", LogLevel.ERROR)
|
|
421
|
+
|
|
422
|
+
# Script entry point: run the async monitor loop until interrupted.
if __name__ == '__main__':
    try:
        asyncio.run(main())
    except KeyboardInterrupt:
        # Graceful shutdown on Ctrl-C.
        log("Stopped by user", LogLevel.INFO)
    except Exception as e:
        log(f"Unexpected error: {str(e)}", LogLevel.ERROR)
|
|
@@ -12,20 +12,25 @@ from redis.client import PubSub
|
|
|
12
12
|
|
|
13
13
|
'''
|
|
14
14
|
set PYTHONPATH=%PYTHONPATH%;D:\dev\siglab\siglab_py
|
|
15
|
-
python
|
|
15
|
+
python trigger_provider.py --provider_id aaa --tickers BTC/USDC:USDC,ETH/USDC:USDC,SOL/USDC:USDC
|
|
16
16
|
'''
|
|
17
17
|
|
|
18
|
-
param : Dict[str, str] = {
|
|
18
|
+
param : Dict[str, str|List[str]] = {
|
|
19
19
|
'provider_id' : '---'
|
|
20
20
|
}
|
|
21
21
|
|
|
22
22
|
def parse_args():
    """Parse CLI args for trigger_provider into the module-level ``param`` dict.

    Raises:
        ValueError: if --tickers is missing or contains no tickers. The
            original used ``assert`` (stripped under ``python -O``) and
            crashed with AttributeError when --tickers was omitted.
    """
    parser = argparse.ArgumentParser() # type: ignore
    parser.add_argument("--provider_id", help="candle_provider will go to work if from redis a matching topic partition_assign_topic with provider_id in it.", default=None)
    parser.add_argument("--tickers", help="Ticker(s) you're trading, comma separated list. Example BTC/USDC:USDC,ETH/USDC:USDC,SOL/USDC:USDC", default=None)

    args = parser.parse_args()
    param['provider_id'] = args.provider_id

    # Fail fast with a clear error instead of AttributeError on None.split.
    if not args.tickers:
        raise ValueError("--tickers is required, e.g. BTC/USDC:USDC,ETH/USDC:USDC")
    tickers = [ticker.strip() for ticker in args.tickers.split(',') if ticker.strip()]
    if not tickers:
        raise ValueError("--tickers must contain at least one ticker")
    param['tickers'] = tickers
|
|
33
|
+
|
|
29
34
|
def init_redis_client() -> StrictRedis:
|
|
30
35
|
redis_client : StrictRedis = StrictRedis(
|
|
31
36
|
host = 'localhost',
|
|
@@ -51,16 +56,12 @@ def trigger_producers(
|
|
|
51
56
|
if __name__ == '__main__':
|
|
52
57
|
parse_args()
|
|
53
58
|
|
|
54
|
-
provider_id : str = param['provider_id']
|
|
59
|
+
provider_id : str = param['provider_id'] # type: ignore
|
|
55
60
|
partition_assign_topic = 'mds_assign_$PROVIDER_ID$'
|
|
56
61
|
candles_partition_assign_topic = partition_assign_topic.replace("$PROVIDER_ID$", provider_id)
|
|
57
62
|
redis_client : StrictRedis = init_redis_client()
|
|
58
63
|
|
|
59
|
-
exchange_tickers : List[str] = [
|
|
60
|
-
'okx_linear|BTC/USDT:USDT',
|
|
61
|
-
'okx_linear|ETH/USDT:USDT',
|
|
62
|
-
'okx_linear|SOL/USDT:USDT',
|
|
63
|
-
]
|
|
64
|
+
exchange_tickers : List[str] = param['tickers'] # type: ignore
|
|
64
65
|
trigger_producers(
|
|
65
66
|
redis_client=redis_client,
|
|
66
67
|
exchange_tickers=exchange_tickers,
|