siglab-py 0.5.55__tar.gz → 0.5.97__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of siglab-py might be problematic.

Files changed (47)
  1. {siglab_py-0.5.55 → siglab_py-0.5.97}/PKG-INFO +1 -1
  2. {siglab_py-0.5.55 → siglab_py-0.5.97}/pyproject.toml +1 -1
  3. {siglab_py-0.5.55 → siglab_py-0.5.97}/setup.cfg +1 -1
  4. {siglab_py-0.5.55 → siglab_py-0.5.97}/siglab_py/constants.py +5 -0
  5. siglab_py-0.5.97/siglab_py/exchanges/deribit.py +83 -0
  6. {siglab_py-0.5.55 → siglab_py-0.5.97}/siglab_py/market_data_providers/candles_provider.py +2 -2
  7. {siglab_py-0.5.55 → siglab_py-0.5.97}/siglab_py/market_data_providers/candles_ta_provider.py +3 -3
  8. siglab_py-0.5.97/siglab_py/market_data_providers/google_monitor.py +320 -0
  9. {siglab_py-0.5.55 → siglab_py-0.5.97}/siglab_py/market_data_providers/orderbooks_provider.py +2 -2
  10. {siglab_py-0.5.55 → siglab_py-0.5.97}/siglab_py/market_data_providers/tg_monitor.py +6 -2
  11. siglab_py-0.5.55/siglab_py/market_data_providers/test_provider.py → siglab_py-0.5.97/siglab_py/market_data_providers/trigger_provider.py +9 -8
  12. {siglab_py-0.5.55 → siglab_py-0.5.97}/siglab_py/ordergateway/gateway.py +70 -33
  13. {siglab_py-0.5.55 → siglab_py-0.5.97}/siglab_py/tests/integration/market_data_util_tests.py +34 -0
  14. {siglab_py-0.5.55 → siglab_py-0.5.97}/siglab_py/tests/unit/analytic_util_tests.py +2 -2
  15. {siglab_py-0.5.55 → siglab_py-0.5.97}/siglab_py/util/analytic_util.py +81 -17
  16. {siglab_py-0.5.55 → siglab_py-0.5.97}/siglab_py/util/market_data_util.py +133 -35
  17. {siglab_py-0.5.55 → siglab_py-0.5.97}/siglab_py/util/simple_math.py +40 -5
  18. {siglab_py-0.5.55 → siglab_py-0.5.97}/siglab_py.egg-info/PKG-INFO +1 -1
  19. {siglab_py-0.5.55 → siglab_py-0.5.97}/siglab_py.egg-info/SOURCES.txt +3 -1
  20. {siglab_py-0.5.55 → siglab_py-0.5.97}/siglab_py/__init__.py +0 -0
  21. {siglab_py-0.5.55 → siglab_py-0.5.97}/siglab_py/exchanges/__init__.py +0 -0
  22. {siglab_py-0.5.55 → siglab_py-0.5.97}/siglab_py/exchanges/any_exchange.py +0 -0
  23. {siglab_py-0.5.55 → siglab_py-0.5.97}/siglab_py/exchanges/futubull.py +0 -0
  24. {siglab_py-0.5.55 → siglab_py-0.5.97}/siglab_py/market_data_providers/__init__.py +0 -0
  25. {siglab_py-0.5.55 → siglab_py-0.5.97}/siglab_py/market_data_providers/aggregated_orderbook_provider.py +0 -0
  26. {siglab_py-0.5.55 → siglab_py-0.5.97}/siglab_py/market_data_providers/ccxt_candles_ta_to_csv.py +0 -0
  27. {siglab_py-0.5.55 → siglab_py-0.5.97}/siglab_py/market_data_providers/deribit_options_expiry_provider.py +0 -0
  28. {siglab_py-0.5.55 → siglab_py-0.5.97}/siglab_py/market_data_providers/futu_candles_ta_to_csv.py +0 -0
  29. {siglab_py-0.5.55 → siglab_py-0.5.97}/siglab_py/ordergateway/__init__.py +0 -0
  30. {siglab_py-0.5.55 → siglab_py-0.5.97}/siglab_py/ordergateway/client.py +0 -0
  31. {siglab_py-0.5.55 → siglab_py-0.5.97}/siglab_py/ordergateway/encrypt_keys_util.py +0 -0
  32. {siglab_py-0.5.55 → siglab_py-0.5.97}/siglab_py/ordergateway/test_ordergateway.py +0 -0
  33. {siglab_py-0.5.55 → siglab_py-0.5.97}/siglab_py/tests/__init__.py +0 -0
  34. {siglab_py-0.5.55 → siglab_py-0.5.97}/siglab_py/tests/integration/__init__.py +0 -0
  35. {siglab_py-0.5.55 → siglab_py-0.5.97}/siglab_py/tests/unit/__init__.py +0 -0
  36. {siglab_py-0.5.55 → siglab_py-0.5.97}/siglab_py/tests/unit/market_data_util_tests.py +0 -0
  37. {siglab_py-0.5.55 → siglab_py-0.5.97}/siglab_py/tests/unit/simple_math_tests.py +0 -0
  38. {siglab_py-0.5.55 → siglab_py-0.5.97}/siglab_py/tests/unit/trading_util_tests.py +0 -0
  39. {siglab_py-0.5.55 → siglab_py-0.5.97}/siglab_py/util/__init__.py +0 -0
  40. {siglab_py-0.5.55 → siglab_py-0.5.97}/siglab_py/util/aws_util.py +0 -0
  41. {siglab_py-0.5.55 → siglab_py-0.5.97}/siglab_py/util/notification_util.py +0 -0
  42. {siglab_py-0.5.55 → siglab_py-0.5.97}/siglab_py/util/retry_util.py +0 -0
  43. {siglab_py-0.5.55 → siglab_py-0.5.97}/siglab_py/util/slack_notification_util.py +0 -0
  44. {siglab_py-0.5.55 → siglab_py-0.5.97}/siglab_py/util/trading_util.py +0 -0
  45. {siglab_py-0.5.55 → siglab_py-0.5.97}/siglab_py.egg-info/dependency_links.txt +0 -0
  46. {siglab_py-0.5.55 → siglab_py-0.5.97}/siglab_py.egg-info/requires.txt +0 -0
  47. {siglab_py-0.5.55 → siglab_py-0.5.97}/siglab_py.egg-info/top_level.txt +0 -0
siglab_py/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: siglab_py
-Version: 0.5.55
+Version: 0.5.97
 Summary: Market data fetches, TA calculations and generic order gateway.
 Author: r0bbarh00d
 Author-email: r0bbarh00d <r0bbarh00d@gmail.com>
pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "siglab_py"
-version = "0.5.55"
+version = "0.5.97"
 description = "Market data fetches, TA calculations and generic order gateway."
 authors = [{name = "r0bbarh00d", email = "r0bbarh00d@gmail.com"}]
 license = {text = "MIT"}
setup.cfg
@@ -1,6 +1,6 @@
 [metadata]
 name = siglab_py
-version = 0.5.55
+version = 0.5.97
 description = Market data fetches, TA calculations and generic order gateway.
 author = r0bbarh00d
 author_email = r0bbarh00d@gmail.com
siglab_py/constants.py
@@ -1,6 +1,8 @@
 import enum
 from typing import Union, List, Dict, Any
 
+INVALID : int = -1
+
 JSON_SERIALIZABLE_TYPES = Union[str, bool, int, float, None, List[Any], Dict[Any, Any]]
 
 class LogLevel(enum.Enum):
@@ -21,3 +23,6 @@ class TrendDirection(enum.Enum):
 
     def to_string(self) -> str:
         return self.name.lower() if self != TrendDirection.UNDEFINED else ''
+
+OrderSide = enum.Enum('OrderSide', 'UNDEFINED BUY SELL')
+PositionStatus = enum.Enum("PositionStatus", 'UNDEFINED OPEN CLOSED SL')
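The two enums added above use Python's functional enum API. A minimal usage sketch (not part of the diff; it assumes only the import path siglab_py.constants as shipped):

    from siglab_py.constants import OrderSide, PositionStatus

    side = OrderSide.BUY
    status = PositionStatus.OPEN
    print(side.name, status.name)               # BUY OPEN
    print(OrderSide['SELL'] is OrderSide.SELL)  # functional-API enums support name lookup: True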
siglab_py/exchanges/deribit.py (new file)
@@ -0,0 +1,83 @@
+from typing import Dict, Any
+
+import ccxt
+import ccxt.pro as ccxtpro
+
+'''
+Why override load_markets?
+Deribit is one of the OG CEXes. Look at 'contractSize' for BTC/USDC:USDC, a linear perp, for example:
+    exchange.markets['BTC/USDC:USDC']['contractSize'] 0.0001
+However, for Deribit, create_order actually expects 'amount' in base ccy, not in "# of contracts" as with most other exchanges supported by CCXT.
+Also note that 'filled' in the response from create_order is also in base ccy, not in # contracts.
+The general prevailing convention in CCXT is: 'amount' should be quoted in '# contracts'.
+Why doesn't CCXT fix Deribit to follow the prevailing convention? Because that would be a breaking change.
+Thus, we override 'contractSize' to 1 for all markets.
+
+Additionally, we need to override 'fetch_position' as it swaps 'notional' with 'contracts'!!! Real ugly. Example below.
+    'id' = None
+    'symbol' = 'BTC/USDC:USDC'
+    'timestamp' = None
+    'datetime' = None
+    'lastUpdateTimestamp' = None
+    'initialMargin' = ???
+    'initialMarginPercentage' = ???
+    'maintenanceMargin' = ???
+    'maintenanceMarginPercentage' = ???
+    'entryPrice' = 85657.0
+    'notional' = 0.0009 <-- This is NOT USD! And this is NOT # Contracts! This is # BTC!
+    'leverage' = 50
+    'unrealizedPnl' = ???
+    'realizedPnl' = ???
+    'contracts' = 77.081445 <-- This is NOT "# contracts"! 0.0009 BTC x markPrice 85646.05
+    'contractSize' = 1.0
+    'marginRatio' = None
+    'liquidationPrice' = None
+    'markPrice' = 85646.05 <-- They use 'markPrice' to calc 'contracts'
+    'lastPrice' = None
+    'collateral' = None
+    'marginMode' = None
+    'side' = 'long'
+    'percentage' = None
+    'hedged' = None
+    'stopLossPrice' = None
+    'takeProfitPrice' = None
+'''
+class Deribit(ccxt.deribit):
+    def __init__(self, *args: Dict[str, Any]) -> None:
+        super().__init__(*args) # type: ignore
+
+    def load_markets(self, reload=False, params={}):
+        self.markets = super().load_markets(reload=reload, params=params)
+
+        for market in self.markets:
+            self.markets[market]['contractSize'] = 1
+
+        return self.markets
+
+    def fetch_position(self, symbol: str, params={}): # type: ignore
+        position = super().fetch_position(symbol=symbol, params=params)
+        pos_usdt = position['contracts']
+        pos_baseccy = position['notional']
+        position['contracts'] = pos_baseccy
+        position['notional'] = pos_usdt
+        return position
+
+class DeribitAsync(ccxtpro.deribit):
+    def __init__(self, *args: Dict[str, Any]) -> None:
+        super().__init__(*args) # type: ignore
+
+    async def load_markets(self, reload=False, params={}):
+        self.markets = await super().load_markets(reload=reload, params=params)
+
+        for market in self.markets:
+            self.markets[market]['contractSize'] = 1
+
+        return self.markets
+
+    async def fetch_position(self, symbol: str, params={}): # type: ignore
+        position = await super().fetch_position(symbol=symbol, params=params)
+        pos_usdt = position['contracts']
+        pos_baseccy = position['notional']
+        position['contracts'] = pos_baseccy
+        position['notional'] = pos_usdt
+        return position
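To make the contractSize override concrete, here is a hedged caller-side sketch (not from the package; the credentials, symbol and $100 notional are illustrative):

    from siglab_py.exchanges.deribit import Deribit

    exchange = Deribit({'apiKey': '...', 'secret': '...'})  # placeholder credentials
    markets = exchange.load_markets()
    assert markets['BTC/USDC:USDC']['contractSize'] == 1    # override applied to every market

    # With contractSize pinned to 1, generic sizing math yields an 'amount'
    # already in base ccy, which is what Deribit's create_order expects:
    price = 85657.0                # hypothetical mark price
    amount_btc = 100.0 / price     # ~0.00117 BTC for a $100 order, not a contract count
    # exchange.create_order('BTC/USDC:USDC', 'limit', 'buy', amount_btc, price)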
siglab_py/market_data_providers/candles_provider.py
@@ -33,7 +33,7 @@ This script is pypy compatible:
     pypy candles_provider.py --provider_id aaa --candle_size 1h --how_many_candles 2169 --redis_ttl_ms 3600000
 
 Key parameters you may want to modify:
-    provider_id: You can trigger this provider instance using test_provider.py. Of course, you'd write your own.
+    provider_id: You can trigger this provider instance using trigger_provider.py. Of course, you'd write your own.
     candle_size: 1m, 5m, 15min, 1h, 1d for example.
     how_many_candles: default to 2169 (24 x 90).
     redis_ttl_ms: This is how long orderbook snapshot will last on redis when provider publishes to it.
@@ -318,7 +318,7 @@ async def main():
             self.universe_reload_id = universe_reload_id
             task = None
 
-    log(f"candles_provider {param['provider_id']} started, waiting for trigger. (Can use test_provider.py to trigger it)")
+    log(f"candles_provider {param['provider_id']} started, waiting for trigger. (Can use trigger_provider.py to trigger it)")
 
    universe_reload_id = 1
    for message in redis_pubsub.listen():
siglab_py/market_data_providers/candles_ta_provider.py
@@ -219,11 +219,11 @@ def work(
     https://redis.io/commands/set/
     '''
     expiry_sec : int = 0
-    if candle_size=="m":
+    if candle_size[-1]=="m":
         expiry_sec = 60 + 60*15
-    elif candle_size=="h":
+    elif candle_size[-1]=="h":
         expiry_sec = 60*60 + 60*15
-    elif candle_size=="d":
+    elif candle_size[-1]=="d":
         expiry_sec = 60*60*24
     expiry_sec += 60*15 # additional 15min
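This fix matters because candle_size arrives as values like "1m", "1h" or "1d" (see the provider docstrings), so the old whole-string comparison against a bare unit letter never matched. A standalone illustration (not from the package):

    for candle_size in ("1m", "15m", "1h", "1d"):
        old_match = candle_size == "h"      # old check: always False for real inputs
        new_match = candle_size[-1] == "h"  # new check keys off the unit suffix
        print(candle_size, old_match, new_match)
    # Only "1h" gets new_match=True, so only it picks up the hourly expiry.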
siglab_py/market_data_providers/google_monitor.py (new file)
@@ -0,0 +1,320 @@
+import asyncio
+import sys
+import traceback
+import os
+import argparse
+import json
+import hashlib
+import re
+from datetime import datetime, timedelta, timezone
+import time
+import pytz
+import arrow
+from enum import Enum
+import logging
+import requests
+from typing import Dict, Optional, Set, Any, Union, List
+from redis import StrictRedis
+
+from siglab_py.util.notification_util import dispatch_notification
+
+current_filename = os.path.basename(__file__)
+
+'''
+google_monitor fetches messages from a particular query. Then:
+a. Save (and accumulate) messages to a message cache file (no duplicates) for further analysis.
+    message_cache_file: str = f"google_search_messages.json"
+
+b. If any of the keywords in message_keywords_filter matches words in a message (--message_keywords_filter):
+    - Publish to redis for strategy consumption, topic: param['mds']['topics']['google_alert']
+    - Dispatch slack alert
+    - If the script runs on Windows, play a wav file (feel free to modify it to play sounds on Ubuntu, for example)
+
+Usage:
+    set PYTHONPATH=%PYTHONPATH%;D:\dev\siglab\siglab_py
+    python google_monitor.py --apikey xxx --search_engine_id yyy --query "site:twitter.com @user_id1 @user_id2 some topic" --slack_info_url=https://hooks.slack.com/services/xxx --slack_critial_url=https://hooks.slack.com/services/xxx --slack_alert_url=https://hooks.slack.com/services/xxx
+
+alert_wav_path
+    Point it to a wav file for alert notification. It's using 'winsound', i.e. Windows only.
+    Set to None otherwise.
+
+Google API: https://console.cloud.google.com/apis/credentials?project=YOUR_PROJECT
+    name: YOUR_API_KEY_NAME
+    apikey: ?????
+
+Google Search Engine
+    To create
+        name: siglab_py_search: https://programmablesearchengine.google.com/controlpanel/create
+            <script async src="https://cse.google.com/cse.js?cx=YOUR_SEARCH_ENGINE_ID">
+            </script>
+            <div class="gcse-search"></div>
+    Then enable it: https://console.developers.google.com/apis/api/customsearch.googleapis.com/overview?project=?????
+
+launch.json for Debugging from VSCode:
+{
+    "version": "0.2.0",
+    "configurations": [
+        {
+            "name": "Python: Current File",
+            "type": "python",
+            "request": "launch",
+            "program": "${file}",
+            "console": "integratedTerminal",
+            "justMyCode": false,
+            "args" : [
+                "--apikey", "xxx",
+                "--search_engine_id", "yyy",
+                "--query", "site:twitter.com @user_id1 @user_id2 some topic",
+                "--slack_info_url", "https://hooks.slack.com/services/xxx",
+                "--slack_critial_url", "https://hooks.slack.com/services/xxx",
+                "--slack_alert_url", "https://hooks.slack.com/services/xxx",
+            ],
+        }
+    ]
+}
+'''
+
+param: Dict[str, Any] = {
+    'apikey': os.getenv('GOOGLE_APIKEY', 'xxx'),
+    'search_engine_id': os.getenv('GOOGLE_SEARCH_ENGINE_ID', 'xxx'),
+    'num_results' : 10,
+    'query' : '',
+    'alert_wav_path' : r"d:\sounds\terrible.wav",
+    "num_shouts" : 5, # How many times 'alert_wav_path' is played
+    "loop_freq_ms" : 1000*60*15, # Google allows max 100 free calls per day.
+    'current_filename' : current_filename,
+
+    'notification' : {
+        'footer' : None,
+
+        # slack webhook url's for notifications
+        'slack' : {
+            'info' : { 'webhook_url' : None },
+            'critical' : { 'webhook_url' : None },
+            'alert' : { 'webhook_url' : None },
+        }
+    },
+
+    'mds': {
+        'topics': {
+            'tg_alert': 'tg_alert'
+        },
+        'redis': {
+            'host': 'localhost',
+            'port': 6379,
+            'db': 0,
+            'ttl_ms': 1000 * 60 * 15
+        }
+    }
+}
+
+class LogLevel(Enum):
+    CRITICAL = 50
+    ERROR = 40
+    WARNING = 30
+    INFO = 20
+    DEBUG = 10
+    NOTSET = 0
+
+logging.Formatter.converter = time.gmtime
+logger: logging.Logger = logging.getLogger()
+log_level: int = logging.INFO
+logger.setLevel(log_level)
+format_str: str = '%(asctime)s %(message)s'
+formatter: logging.Formatter = logging.Formatter(format_str)
+sh: logging.StreamHandler = logging.StreamHandler()
+sh.setLevel(log_level)
+sh.setFormatter(formatter)
+logger.addHandler(sh)
+
+def log(message: str, log_level: LogLevel = LogLevel.INFO) -> None:
+    if log_level.value < LogLevel.WARNING.value:
+        logger.info(f"{datetime.now()} {message}")
+    elif log_level.value == LogLevel.WARNING.value:
+        logger.warning(f"{datetime.now()} {message}")
+    elif log_level.value == LogLevel.ERROR.value:
+        logger.error(f"{datetime.now()} {message}")
+
+def parse_args():
+    parser = argparse.ArgumentParser() # type: ignore
+    parser.add_argument("--apikey", help="API key", default=None)
+    parser.add_argument("--search_engine_id", help="Google search engine ID", default=None)
+    parser.add_argument("--num_results", help="Max number items to fetch", default=10)
+    parser.add_argument("--query", help="Query - what are you looking for?", default=None)
+    parser.add_argument("--slack_info_url", help="Slack webhook url for INFO", default=None)
+    parser.add_argument("--slack_critial_url", help="Slack webhook url for CRITICAL", default=None)
+    parser.add_argument("--slack_alert_url", help="Slack webhook url for ALERT", default=None)
+
+    args = parser.parse_args()
+
+    param['apikey'] = args.apikey
+    param['search_engine_id'] = args.search_engine_id
+    param['num_results'] = args.num_results
+    param['query'] = args.query
+
+    param['notification']['slack']['info']['webhook_url'] = args.slack_info_url
+    param['notification']['slack']['critical']['webhook_url'] = args.slack_critial_url
+    param['notification']['slack']['alert']['webhook_url'] = args.slack_alert_url
+
+    param['notification']['footer'] = f"From {param['current_filename']}"
+
+    print(f"Startup args: {args}") # Don't use logger, it's not set up yet.
+    print(f"param: {json.dumps(param, indent=2)}")
+
+def init_redis_client() -> StrictRedis:
+    redis_client : StrictRedis = StrictRedis(
+        host = param['mds']['redis']['host'],
+        port = param['mds']['redis']['port'],
+        db = 0,
+        ssl = False
+    )
+    try:
+        redis_client.keys()
+    except ConnectionError as redis_conn_error:
+        err_msg = f"Failed to connect to redis: {param['mds']['redis']['host']}, port: {param['mds']['redis']['port']}"
+        log(f"Failed to init redis connection. Will skip publishes to redis. {err_msg}")
+        redis_client = None # type: ignore
+
+    return redis_client
+
+def search_google_custom(query, api_key, search_engine_id, num_results=10):
+    url = 'https://www.googleapis.com/customsearch/v1'
+    params = {
+        'key': api_key,
+        'cx': search_engine_id,
+        'q': query,
+        'num': num_results,
+        'sort': 'date',
+        'dateRestrict': 'd1' # Restrict to most recent (adjust as needed: d1=day, m1=month, etc.)
+    }
+
+    response = requests.get(url, params=params)
+
+    if response.status_code == 200:
+        return response.json()
+    else:
+        log(f"Query error: {response.status_code} - {response.text}")
+        return None
+
+async def main() -> None:
+    parse_args()
+
+    message_cache_file: str = f"google_search_messages.json"
+    log(f"message_cache_file: {message_cache_file}")
+
+    notification_params : Dict[str, Any] = param['notification']
+
+    processed_messages : List[Dict[str, Any]] = []
+    seen_hashes : Set[str] = set()
+    if os.path.exists(message_cache_file):
+        with open(message_cache_file, 'r', encoding='utf-8') as f:
+            lines = f.readlines()
+            for line in lines:
+                message_data = json.loads(line)
+                message_hash: str = hashlib.sha256(message_data['message'].encode('utf-8')).hexdigest()
+
+                message_data['datetime'] = pytz.UTC.localize(arrow.get(message_data['datetime']).datetime.replace(tzinfo=None))
+
+                if message_hash not in seen_hashes:
+                    seen_hashes.add(message_hash)
+                    processed_messages.append(message_data)
+
+    processed_messages = sorted(processed_messages, key=lambda m: m['datetime'])
+
+    try:
+        redis_client: Optional[StrictRedis] = init_redis_client()
+    except Exception as redis_err:
+        redis_client = None
+        log(f"Failed to connect to redis. Still run but not publishing to it. {redis_err}")
+
+    while True:
+        try:
+            results = search_google_custom(param['query'], param['apikey'], param['search_engine_id'], param['num_results'])
+
+            if results:
+                if 'items' in results:
+                    for item in results['items']:
+                        title = item.get('title', 'No title')
+                        snippet = item.get('snippet', 'No snippet')
+                        link = item.get('link', 'No link')
+                        published_date = item.get('pagemap', {}).get('metatags', [{}])[0].get('article:published_time', 'No date')
+
+                        dt_message = datetime.now()
+                        pattern = r'^\d+\s*(?:days?|day?|hours?|hour?|minutes?|minute?|seconds?|second?|h|m|s)\s*(?:ago)?.*?([A-Za-z]+\s+\d+,\s+\d{4},\s+\d+:\d+\s+[AP]M\s+ET)'
+                        match = re.match(pattern, snippet)
+                        if published_date == 'No date' and match:
+                            published_date = match.group(1)
+                            dt_message = datetime.strptime(published_date, '%b %d, %Y, %I:%M %p ET')
+
+                        snippet = re.sub(pattern, '', snippet).strip()
+
+                        timestamp_ms = int(dt_message.timestamp() * 1000)
+                        message_data: Dict[str, Any] = {
+                            "timestamp_ms": timestamp_ms,
+                            "datetime": dt_message.isoformat(), # Always in UTC
+                            "title" : title,
+                            "message": snippet,
+                            "url" : link
+                        }
+                        json_str: str = json.dumps(message_data, ensure_ascii=False, sort_keys=True)
+                        message_hash: str = hashlib.sha256(snippet.encode('utf-8')).hexdigest()
+                        if (message_hash not in seen_hashes):
+                            seen_hashes.add(message_hash)
+                            processed_messages.append(message_data)
+
+                            log(f"{message_data}")
+
+                            dispatch_notification(title=f"{param['current_filename']} Incoming! {title}", message=message_data, footer=param['notification']['footer'], params=notification_params, log_level=LogLevel.CRITICAL, logger=logger) # type: ignore
+
+                            with open(message_cache_file, 'a', encoding='utf-8') as f:
+                                json.dump(message_data, f, ensure_ascii=False)
+                                f.write('\n')
+
+                            if param['alert_wav_path'] and sys.platform == 'win32':
+                                import winsound
+                                for _ in range(param['num_shouts']):
+                                    winsound.PlaySound(param['alert_wav_path'], winsound.SND_FILENAME)
+
+                            if redis_client:
+                                try:
+                                    publish_topic = f"google_search"
+                                    redis_client.publish(publish_topic, json_str)
+                                    redis_client.setex(message_hash, param['mds']['redis']['ttl_ms'] // 1000, json_str)
+                                    log(f"Published message {json_str} to Redis topic {publish_topic}", LogLevel.INFO)
+                                except Exception as e:
+                                    log(f"Failed to publish to Redis: {str(e)}", LogLevel.ERROR)
+
+                            await asyncio.sleep(int(param['loop_freq_ms'] / 1000))
+
+            if processed_messages:
+                oldest_message: Dict[str, Any] = min(processed_messages, key=lambda x: x['timestamp_ms'])
+                newest_message: Dict[str, Any] = max(processed_messages, key=lambda x: x['timestamp_ms'])
+                log(
+                    json.dumps(
+                        {
+                            'num_messages': len(processed_messages),
+                            'oldest': {
+                                'timestamp_ms': oldest_message['timestamp_ms'],
+                                'datetime': datetime.fromtimestamp(int(oldest_message['timestamp_ms']/1000), tz=timezone.utc).isoformat()
+                            },
+                            'latest': {
+                                'timestamp_ms': newest_message['timestamp_ms'],
+                                'datetime': datetime.fromtimestamp(int(newest_message['timestamp_ms']/1000), tz=timezone.utc).isoformat()
+                            }
+                        }, indent=2
+                    ),
+                    LogLevel.INFO
+                )
+
+        except Exception as e:
+            log(f"Oops {str(e)} {str(sys.exc_info()[0])} {str(sys.exc_info()[1])} {traceback.format_exc()}", LogLevel.ERROR)
+        finally:
+            await asyncio.sleep(int(param['loop_freq_ms'] / 1000))
+
+if __name__ == '__main__':
+    try:
+        asyncio.run(main())
+    except KeyboardInterrupt:
+        log("Stopped by user", LogLevel.INFO)
+    except Exception as e:
+        log(f"Unexpected error: {str(e)}", LogLevel.ERROR)
siglab_py/market_data_providers/orderbooks_provider.py
@@ -28,7 +28,7 @@ To start from command prompt:
 This script is pypy compatible.
 
 Key parameters you may want to modify:
-    provider_id: You can trigger this provider instance using test_provider.py. Of course, you'd write your own.
+    provider_id: You can trigger this provider instance using trigger_provider.py. Of course, you'd write your own.
     instance_capacity: max # tickers this provider instance will handle.
     ts_delta_observation_ms_threshold: default to 150ms. "Observation Delta" is clock diff between orderbook timestamp, and your local server clock.
     ts_delta_consecutive_ms_threshold: default to 150ms. "Consecutive Delta" is time elapsed between consecutive orderbook updates.
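The two thresholds are simple clock arithmetic. An illustration (not from the package; the timestamps are hypothetical) of how such deltas would be computed from an orderbook snapshot:

    import time

    ob_ts_ms = 1714000000000                         # hypothetical exchange orderbook timestamp
    now_ms = int(time.time() * 1000)
    observation_delta_ms = now_ms - ob_ts_ms         # "Observation Delta": exchange clock vs your local server clock
    prev_ob_ts_ms = ob_ts_ms - 120                   # hypothetical previous update, 120ms earlier
    consecutive_delta_ms = ob_ts_ms - prev_ob_ts_ms  # "Consecutive Delta": gap between consecutive updates
    stale = observation_delta_ms > 150 or consecutive_delta_ms > 150  # both default thresholds are 150ms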
@@ -348,7 +348,7 @@ async def main():
     candles_publish_topic : str = param['mds']['topics']['candles_publish_topic']
     redis_pubsub : PubSub = init_redis_channel_subscription(redis_client, partition_assign_topic)
 
-    log(f"orderbooks_provider {param['provider_id']} started, waiting for trigger. (Can use test_provider.py to trigger it)")
+    log(f"orderbooks_provider {param['provider_id']} started, waiting for trigger. (Can use trigger_provider.py to trigger it)")
 
     tasks = []
     for message in redis_pubsub.listen():
siglab_py/market_data_providers/tg_monitor.py
@@ -254,7 +254,11 @@ async def main() -> None:
     processed_messages = sorted(processed_messages, key=lambda m: m['datetime'])
     last_message_date = processed_messages[-1]['datetime']
 
-    redis_client: Optional[StrictRedis] = init_redis_client()
+    try:
+        redis_client: Optional[StrictRedis] = init_redis_client()
+    except Exception as redis_err:
+        redis_client = None
+        log(f"Failed to connect to redis. Still run but not publishing to it. {redis_err}")
 
     start_date: Optional[datetime] = None
     if param.get('start_date'):
@@ -362,7 +366,7 @@ async def main() -> None:
                     processed_messages.append(message_data)
 
                     if message_date>tm1:
-                        dispatch_notification(title=f"{param['current_filename']} {param['channel_username']} started", message=message_data, footer=param['notification']['footer'], params=notification_params, log_level=LogLevel.CRITICAL, logger=logger) # type: ignore
+                        dispatch_notification(title=f"{param['current_filename']} {param['channel_username']} Incoming!", message=message_data, footer=param['notification']['footer'], params=notification_params, log_level=LogLevel.CRITICAL, logger=logger) # type: ignore
 
                     with open(message_cache_file, 'a', encoding='utf-8') as f:
                         json.dump(message_data, f, ensure_ascii=False)
siglab_py/market_data_providers/test_provider.py → trigger_provider.py
@@ -12,20 +12,25 @@ from redis.client import PubSub
 
 '''
 set PYTHONPATH=%PYTHONPATH%;D:\dev\siglab\siglab_py
-python test_provider.py --provider_id aaa
+python trigger_provider.py --provider_id aaa --tickers BTC/USDC:USDC,ETH/USDC:USDC,SOL/USDC:USDC
 '''
 
-param : Dict[str, str] = {
+param : Dict[str, str|List[str]] = {
     'provider_id' : '---'
 }
 
 def parse_args():
     parser = argparse.ArgumentParser() # type: ignore
     parser.add_argument("--provider_id", help="candle_provider will go to work if from redis a matching topic partition_assign_topic with provider_id in it.", default=None)
+    parser.add_argument("--tickers", help="Ticker(s) you're trading, comma separated list. Example BTC/USDC:USDC,ETH/USDC:USDC,SOL/USDC:USDC", default=None)
 
     args = parser.parse_args()
     param['provider_id'] = args.provider_id
 
+    tickers = args.tickers.split(',')
+    assert(len(tickers)>0)
+    param['tickers'] = [ ticker.strip() for ticker in tickers ]
+
 def init_redis_client() -> StrictRedis:
     redis_client : StrictRedis = StrictRedis(
         host = 'localhost',
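The new --tickers flag replaces the previously hardcoded okx_linear list; split plus strip tolerates stray whitespace, as this standalone illustration (not from the package) shows:

    args_tickers = "BTC/USDC:USDC, ETH/USDC:USDC ,SOL/USDC:USDC"  # hypothetical --tickers value
    tickers = [ticker.strip() for ticker in args_tickers.split(',')]
    print(tickers)  # ['BTC/USDC:USDC', 'ETH/USDC:USDC', 'SOL/USDC:USDC']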
@@ -51,16 +56,12 @@ def trigger_producers(
 if __name__ == '__main__':
     parse_args()
 
-    provider_id : str = param['provider_id']
+    provider_id : str = param['provider_id'] # type: ignore
     partition_assign_topic = 'mds_assign_$PROVIDER_ID$'
     candles_partition_assign_topic = partition_assign_topic.replace("$PROVIDER_ID$", provider_id)
     redis_client : StrictRedis = init_redis_client()
 
-    exchange_tickers : List[str] = [
-        'okx_linear|BTC/USDT:USDT',
-        'okx_linear|ETH/USDT:USDT',
-        'okx_linear|SOL/USDT:USDT',
-    ]
+    exchange_tickers : List[str] = param['tickers'] # type: ignore
     trigger_producers(
         redis_client=redis_client,
         exchange_tickers=exchange_tickers,