siglab-py 0.5.2__py3-none-any.whl → 0.5.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of siglab-py might be problematic. Click here for more details.

@@ -0,0 +1,424 @@
1
+ import asyncio
2
+ import sys
3
+ import traceback
4
+ import os
5
+ import argparse
6
+ import json
7
+ import hashlib
8
+ import re
9
+ from datetime import datetime, timedelta, timezone
10
+ import time
11
+ import pytz
12
+ import arrow
13
+ from enum import Enum
14
+ import logging
15
+ from typing import Dict, Optional, Set, Any, Union, List
16
+ from telethon.sync import TelegramClient
17
+ from telethon.errors import SessionPasswordNeededError, FloodWaitError
18
+ from telethon.types import Message
19
+ from redis import StrictRedis
20
+
21
+ from siglab_py.util.notification_util import dispatch_notification
22
+
23
+ current_filename = os.path.basename(__file__)
24
+
25
+ '''
26
+ tg_monitor fetches messages from particular TG channel (--channel_username). Then:
27
+ a. Save (and accumulate) messages to message cache file (No duplicates) for further analysis.
28
+ message_cache_file: str = f"{param['channel_username'].lstrip('@')}_messages.json"
29
+ Note, only messages from senders in param['users_filter'] will be included.
30
+
31
+ b. If any of keywords in message_keywords_filter matches words in message (--message_keywords_filter):
32
+ - Publish to redis for strategy consumption, topic: param['mds']['topics']['tg_alert']
33
+ - Dispatch slack alert
34
+ - If the script runs on Windows, play a wav file (feel free to modify it to play sounds on Ubuntu, for example)
35
+
36
+ Usage:
37
+ set PYTHONPATH=%PYTHONPATH%;D:\dev\siglab\siglab_py
38
+ python tg_monitor.py --api_id xxx --api_hash yyy --phone +XXXYYYYYYYY --channel_username @zZz --users_filter "yYy,zZz" --start_date 2025-03-01 --message_keywords_filter "trump,trade war,tariff" --slack_info_url=https://hooks.slack.com/services/xxx --slack_critial_url=https://hooks.slack.com/services/xxx --slack_alert_url=https://hooks.slack.com/services/xxx
39
+
40
+ api_id and api_hash
41
+ Go to https://my.telegram.org/
42
+ It's under "API development tools"
43
+
44
+ phone
45
+ Format: +XXXYYYYYYYY where XXX is country/area code.
46
+
47
+ channel_username and users_filter
48
+ You decide which TG channel, and under that channel what users you're tracking. TG is noisy.
49
+
50
+ start_date
51
+ tg_monitor relies on TelegramClient.get_messages: https://docs.telethon.dev/en/stable/modules/client.html#telethon.client.messages.MessageMethods.get_messages
52
+ As with most API, they generally limit number of entries you can fetch in one go (~100).
53
+ tg_monitor implemented sliding window technique with cutoff = start_date for the first fetch.
54
+ Note, if you increment your sliding window too quickly, you'd miss some messages. If you scan too slowly, it'd take forever. It's a balancing act.
55
+ Atm, here's the general logic for how we move the sliding window forward:
56
+ * If fetches return zero messages that match param['users_filter'], we move the window forward by an hour.
57
+ * Otherwise, we move the window by adding 5 minutes to the timestamp of the last message fetched (whether it matches users_filter or not).
58
+ A decent specification of 'users_filter' will help minimize chances of missed messages.
59
+ Default: None; in this case the cutoff date will be set to tm1 if message_cache_file does NOT exist.
60
+ Otherwise, it'd default to the latest message's datetime, plus a couple of minutes.
61
+ Set to 'yyyy-MM-dd' if you want to collect more history for analysis.
62
+ Generally, TG allows you to fetch around three months of history. Anything more, and get_messages will return an empty array.
63
+
64
+ Regardless, messages you collected are accumulated in 'message_cache_file'.
65
+
66
+ message_keywords_filter
67
+ TG message_keywords_filter: Comma separated list, case-insensitive. Default: None (i.e. no keywords)
68
+ Example, --message_keywords_filter "exploit, attack, hack, breach, compromise, stolen, leak, security incident, phishing, social engineer, withdrawals freeze, frozen"
69
+
70
+ We play a sound, publish to redis, etc., only if the message contains any of the keywords in message_keywords_filter.
71
+ Quote around comma separated list.
72
+
73
+ alert_wav_path
74
+ Point it to wav file for alert notification. It's using 'winsound', i.e. Windows only.
75
+ Set to None otherwise.
76
+
77
+ launch.json for Debugging from VSCode:
78
+ {
79
+ "version": "0.2.0",
80
+ "configurations": [
81
+ {
82
+ "name": "Python: Current File",
83
+ "type": "python",
84
+ "request": "launch",
85
+ "program": "${file}",
86
+ "console": "integratedTerminal",
87
+ "justMyCode": false,
88
+ "args" : [
89
+ "--api_id", "xxx",
90
+ "--api_hash", "xxx",
91
+ "--phone", "+XXXYYYYYYYY",
92
+ "--channel_username", "@SomeChannel",
93
+ "--users_filter", "SomeBody",
94
+ "--start_date", "2025-03-01",
95
+ "--message_keywords_filter", "exploit, attack, hack, breach, compromise, stolen, leak, security incident, phishing, social engineer, withdrawals freeze, frozen",
96
+ "--slack_info_url", "https://hooks.slack.com/services/xxx",
97
+ "--slack_critial_url", "https://hooks.slack.com/services/xxx",
98
+ "--slack_alert_url", "https://hooks.slack.com/services/xxx",
99
+ ],
100
+ }
101
+ ]
102
+ }
103
+
104
+ https://norman-lm-fung.medium.com/monitoring-telegram-channel-tg-monitor-from-siglab-py-f7ec30c2c32e
105
+ '''
106
+
107
+ param: Dict[str, Any] = {
108
+ 'api_id': os.getenv('TELEGRAM_API_ID', 'xxx'),
109
+ 'api_hash': os.getenv('TELEGRAM_API_HASH', 'xxx'),
110
+ 'phone': os.getenv('TELEGRAM_PHONE', '+XXXYYYYYYYY'),
111
+ 'channel_username': '@SomeChannel',
112
+ 'users_filter' : None,
113
+ 'message_keywords_filter': [],
114
+ 'start_date': None,
115
+ 'alert_wav_path' : r"d:\sounds\terrible.wav",
116
+ "num_shouts" : 15, # How many times 'alert_wav_path' is played
117
+ "loop_freq_ms" : 1000,
118
+ 'current_filename' : current_filename,
119
+
120
+ 'notification' : {
121
+ 'footer' : None,
122
+
123
+ # slack webhook url's for notifications
124
+ 'slack' : {
125
+ 'info' : { 'webhook_url' : None },
126
+ 'critical' : { 'webhook_url' : None },
127
+ 'alert' : { 'webhook_url' : None },
128
+ }
129
+ },
130
+
131
+ 'mds': {
132
+ 'topics': {
133
+ 'tg_alert': 'tg_alert'
134
+ },
135
+ 'redis': {
136
+ 'host': 'localhost',
137
+ 'port': 6379,
138
+ 'db': 0,
139
+ 'ttl_ms': 1000 * 60 * 15
140
+ }
141
+ }
142
+ }
143
+
144
+ class LogLevel(Enum):
145
+ CRITICAL = 50
146
+ ERROR = 40
147
+ WARNING = 30
148
+ INFO = 20
149
+ DEBUG = 10
150
+ NOTSET = 0
151
+
152
+ logging.Formatter.converter = time.gmtime
153
+ logger: logging.Logger = logging.getLogger()
154
+ log_level: int = logging.INFO
155
+ logger.setLevel(log_level)
156
+ format_str: str = '%(asctime)s %(message)s'
157
+ formatter: logging.Formatter = logging.Formatter(format_str)
158
+ sh: logging.StreamHandler = logging.StreamHandler()
159
+ sh.setLevel(log_level)
160
+ sh.setFormatter(formatter)
161
+ logger.addHandler(sh)
162
+
163
+ def log(message: str, log_level: LogLevel = LogLevel.INFO) -> None:
164
+ if log_level.value < LogLevel.WARNING.value:
165
+ logger.info(f"{datetime.now()} {message}")
166
+ elif log_level.value == LogLevel.WARNING.value:
167
+ logger.warning(f"{datetime.now()} {message}")
168
+ elif log_level.value == LogLevel.ERROR.value:
169
+ logger.error(f"{datetime.now()} {message}")
170
+
171
+ def parse_args():
172
+ parser = argparse.ArgumentParser() # type: ignore
173
+ parser.add_argument("--api_id", help="TG api_id", default=None)
174
+ parser.add_argument("--api_hash", help="TG api_hash", default=None)
175
+ parser.add_argument("--phone", help="G Phone tied to TG. Format: +XXXYYYYYYYY where XXX is country/area code.", default=None)
176
+ parser.add_argument("--channel_username", help="TG channel_username", default=None)
177
+ parser.add_argument("--users_filter", help="Comma separated list of TG user names", default=None)
178
+ parser.add_argument("--message_keywords_filter", help="TG message_keywords_filter: Comma separated list, case-insensitive. Default: None (i.e. no keywords)", default=None)
179
+ parser.add_argument("--start_date", help="start_date, format: yyyy-MM-dd. If left to null, cutoff date default to last message's datetime from message cache, or tm1.", default=None)
180
+
181
+ parser.add_argument("--slack_info_url", help="Slack webhook url for INFO", default=None)
182
+ parser.add_argument("--slack_critial_url", help="Slack webhook url for CRITICAL", default=None)
183
+ parser.add_argument("--slack_alert_url", help="Slack webhook url for ALERT", default=None)
184
+
185
+ args = parser.parse_args()
186
+
187
+ param['api_id'] = args.api_id
188
+ param['api_hash'] = args.api_hash
189
+ param['channel_username'] = args.channel_username
190
+ if args.users_filter:
191
+ param['users_filter'] = [ user.lower().strip() for user in args.users_filter.split(',') ]
192
+ param['start_date'] = args.start_date
193
+
194
+ if args.message_keywords_filter:
195
+ param['message_keywords_filter'] = args.message_keywords_filter.split(',')
196
+
197
+ param['notification']['slack']['info']['webhook_url'] = args.slack_info_url
198
+ param['notification']['slack']['critical']['webhook_url'] = args.slack_critial_url
199
+ param['notification']['slack']['alert']['webhook_url'] = args.slack_alert_url
200
+
201
+ param['notification']['footer'] = f"From {param['current_filename']} {param['channel_username'].lstrip('@')}"
202
+
203
+ print(f"Startup args: {args}") # Dont use logger, not yet setup yet.
204
+ print(f"param: {print(json.dumps(param, indent=2))}")
205
+
206
+ def init_redis_client() -> StrictRedis:
207
+ redis_client : StrictRedis = StrictRedis(
208
+ host = param['mds']['redis']['host'],
209
+ port = param['mds']['redis']['port'],
210
+ db = 0,
211
+ ssl = False
212
+ )
213
+ try:
214
+ redis_client.keys()
215
+ except ConnectionError as redis_conn_error:
216
+ err_msg = f"Failed to connect to redis: {param['mds']['redis']['host']}, port: {param['mds']['redis']['port']}"
217
+ log(f"Failed to init redis connection. Will skip publishes to redis. {err_msg}")
218
+ redis_client = None # type: ignore
219
+
220
+ return redis_client
221
+
222
+ async def main() -> None:
223
+ parse_args()
224
+
225
+ session_file: str = f"{param['channel_username'].lstrip('@')}_session"
226
+ message_cache_file: str = f"{param['channel_username'].lstrip('@')}_messages.json"
227
+ log(f"session_file: {session_file}")
228
+ log(f"message_cache_file: {message_cache_file}")
229
+
230
+ notification_params : Dict[str, Any] = param['notification']
231
+
232
+ tm1 : datetime = datetime(datetime.now().year, datetime.now().month, datetime.now().day) + timedelta(days=-1)
233
+ tm1 = tm1.astimezone(pytz.UTC)
234
+
235
+ last_message_date: datetime = tm1
236
+ processed_messages : List[Dict[str, Any]] = []
237
+ seen_hashes : Set[str] = set()
238
+ if os.path.exists(message_cache_file):
239
+ with open(message_cache_file, 'r', encoding='utf-8') as f:
240
+ lines = f.readlines()
241
+ for line in lines:
242
+ message_data = json.loads(line)
243
+
244
+ # json.dumps before converting datetime to type(datetime)
245
+ json_str: str = json.dumps(message_data, ensure_ascii=False, sort_keys=True)
246
+ message_hash: str = hashlib.sha256(json_str.encode('utf-8')).hexdigest()
247
+
248
+ message_data['datetime'] = pytz.UTC.localize(arrow.get(message_data['datetime']).datetime.replace(tzinfo=None))
249
+
250
+ if message_hash not in seen_hashes:
251
+ seen_hashes.add(message_hash)
252
+ processed_messages.append(message_data)
253
+
254
+ processed_messages = sorted(processed_messages, key=lambda m: m['datetime'])
255
+ last_message_date = processed_messages[-1]['datetime']
256
+
257
+ redis_client: Optional[StrictRedis] = init_redis_client()
258
+
259
+ start_date: Optional[datetime] = None
260
+ if param.get('start_date'):
261
+ try:
262
+ start_date = datetime.strptime(param['start_date'], '%Y-%m-%d').replace(tzinfo=pytz.UTC)
263
+ log(f"Fetching messages from {param['start_date']} onward", LogLevel.INFO)
264
+ except ValueError as e:
265
+ log(f"Invalid start_date format: {str(e)}. Defaulting to current time.", LogLevel.WARNING)
266
+
267
+ offset_date : datetime = start_date if start_date else last_message_date + timedelta(minutes=1)
268
+
269
+ async with TelegramClient(session_file, param['api_id'], param['api_hash']) as client:
270
+ try:
271
+ if not await client.is_user_authorized():
272
+ try:
273
+ await client.start(phone=param['phone'])
274
+ except SessionPasswordNeededError:
275
+ password: str = input("Two-factor authentication enabled. Enter your password: ")
276
+ await client.start(phone=param['phone'], password=password)
277
+ except FloodWaitError as e:
278
+ log(f"Flood wait error: Please wait {e.seconds} seconds", LogLevel.ERROR)
279
+ return
280
+ except Exception as e:
281
+ log(f"Authorization failed: {str(e)}", LogLevel.ERROR)
282
+ return
283
+ try:
284
+ channel: Any = await client.get_entity(param['channel_username'])
285
+ log(f"Connected to channel: {channel.title}", LogLevel.INFO)
286
+ except Exception as e:
287
+ log(f"Failed to access channel {param['channel_username']}: {str(e)}", LogLevel.ERROR)
288
+ return
289
+
290
+ last_message_date: datetime = offset_date
291
+ oldest_message, newest_message = None, None # type: ignore
292
+ while True:
293
+ tm1 = datetime(datetime.now().year, datetime.now().month, datetime.now().day) + timedelta(days=-1)
294
+ tm1 = tm1.astimezone(pytz.UTC)
295
+
296
+ messages = []
297
+ for username in param['users_filter']:
298
+ _messages = await client.get_messages(
299
+ channel,
300
+ limit=100,
301
+ from_user=username,
302
+ offset_date=last_message_date # offset_date is the cutoff
303
+ )
304
+ messages = messages + _messages
305
+ log(f"Fetched {len(messages)} raw messages with offset_date={last_message_date.isoformat()}", LogLevel.INFO)
306
+
307
+ '''
308
+ Sliding Window: given the way we increment 'last_message_date' (the cutoff), it's possible we miss some messages.
309
+ However, if we're moving the sliding window too slowly, it'd take forever to scan.
310
+ And if you hit their API too frequently:
311
+ Sleeping for 20s (0:00:20) on GetHistoryRequest flood wait
312
+ TG won't ban your account, but still you'd need to wait.
313
+ Adjust how fast you increment below to suit your purpose.
314
+ '''
315
+ realtime_cutoff = (datetime.now() + timedelta(minutes=-3)).astimezone(pytz.UTC)
316
+ relevant_messages = [ msg for msg in messages if (msg.sender.username.lower().strip() if msg.sender and msg.sender.username else str(msg.sender_id)) in param['users_filter'] ]
317
+ if not relevant_messages:
318
+ last_message_date = last_message_date + timedelta(hours=1)
319
+ if last_message_date>realtime_cutoff:
320
+ last_message_date = realtime_cutoff
321
+ continue
322
+ else:
323
+ sorted_messages = sorted(messages, key=lambda m: m.date)
324
+ last_message_date = sorted_messages[-1].date + timedelta(minutes=5) if sorted_messages[-1].date + timedelta(minutes=5)>last_message_date else last_message_date + timedelta(hours=1)
325
+ if last_message_date>realtime_cutoff:
326
+ last_message_date = realtime_cutoff
327
+
328
+ for message in sorted_messages: # Process oldest to newest
329
+ if not isinstance(message, Message):
330
+ continue
331
+ sender = await message.get_sender() # type: ignore
332
+ sender_name: Union[str, int] = sender.username if sender and sender.username else message.sender_id # type: ignore
333
+ sender_name = str(sender_name).lower().strip()
334
+ message_date: datetime = message.date # type: ignore
335
+ if message_date.tzinfo is None:
336
+ message_date = pytz.UTC.localize(message_date)
337
+ else:
338
+ message_date = message_date.astimezone(pytz.UTC)
339
+
340
+ message_text: str = message.message or ""
341
+ message_text = re.sub(r'[^a-zA-Z0-9\s.!?]', '', message_text)
342
+ message_text = message_text.replace(',', '')
343
+
344
+ message_data: Dict[str, Any] = {
345
+ "timestamp_ms": int(message_date.timestamp() * 1000),
346
+ "datetime": message_date.isoformat(), # Always in UTC
347
+ "sender": sender_name,
348
+ "message": message_text
349
+ }
350
+
351
+ json_str: str = json.dumps(message_data, ensure_ascii=False, sort_keys=True)
352
+ message_hash: str = hashlib.sha256(json_str.encode('utf-8')).hexdigest()
353
+
354
+ if (
355
+ (
356
+ not param['users_filter']
357
+ or (param['users_filter'] and sender_name in param['users_filter'])
358
+ )
359
+ and message_hash not in seen_hashes
360
+ ):
361
+ seen_hashes.add(message_hash)
362
+ processed_messages.append(message_data)
363
+
364
+ if message_date>tm1:
365
+ dispatch_notification(title=f"{param['current_filename']} {param['channel_username']} started", message=message_data, footer=param['notification']['footer'], params=notification_params, log_level=LogLevel.CRITICAL, logger=logger) # type: ignore
366
+
367
+ with open(message_cache_file, 'a', encoding='utf-8') as f:
368
+ json.dump(message_data, f, ensure_ascii=False)
369
+ f.write('\n')
370
+
371
+ if (
372
+ param['message_keywords_filter']
373
+ and any(keyword.lower().strip() in message_text.lower() for keyword in param['message_keywords_filter'])
374
+ ):
375
+ if param['alert_wav_path'] and message_date>=tm1 and sys.platform == 'win32':
376
+ import winsound
377
+ for _ in range(param['num_shouts']):
378
+ winsound.PlaySound(param['alert_wav_path'], winsound.SND_FILENAME)
379
+ log(f"Incoming! {message_data}")
380
+
381
+ if redis_client:
382
+ try:
383
+ publish_topic = f"{param['mds']['topics']['tg_alert']}_{message.id}"
384
+ redis_client.publish(publish_topic, json_str)
385
+ redis_client.setex(message_hash, param['mds']['redis']['ttl_ms'] // 1000, json_str)
386
+ log(f"Published message {message.id} to Redis topic {publish_topic}", LogLevel.INFO)
387
+ except Exception as e:
388
+ log(f"Failed to publish to Redis: {str(e)}", LogLevel.ERROR)
389
+
390
+ if processed_messages:
391
+ oldest_message: Dict[str, Any] = min(processed_messages, key=lambda x: x['timestamp_ms'])
392
+ newest_message: Dict[str, Any] = max(processed_messages, key=lambda x: x['timestamp_ms'])
393
+ log(
394
+ json.dumps(
395
+ {
396
+ 'num_messages': len(processed_messages),
397
+ 'oldest': {
398
+ 'timestamp_ms': oldest_message['timestamp_ms'],
399
+ 'datetime': datetime.fromtimestamp(int(oldest_message['timestamp_ms']/1000),tz=timezone.utc).isoformat()
400
+ },
401
+ 'latest': {
402
+ 'timestamp_ms': newest_message['timestamp_ms'],
403
+ 'datetime': datetime.fromtimestamp(int(newest_message['timestamp_ms']/1000),tz=timezone.utc).isoformat()
404
+ }
405
+ }, indent=2
406
+ ),
407
+ LogLevel.INFO
408
+ )
409
+ else:
410
+ log(f"No messages processed in this iteration. last_message_date: {last_message_date}", LogLevel.INFO)
411
+ last_message_date = last_message_date + timedelta(days=1)
412
+
413
+ await asyncio.sleep(int(param['loop_freq_ms'] / 1000))
414
+
415
+ except Exception as e:
416
+ log(f"Oops {str(e)} {str(sys.exc_info()[0])} {str(sys.exc_info()[1])} {traceback.format_exc()}", LogLevel.ERROR)
417
+
418
+ if __name__ == '__main__':
419
+ try:
420
+ asyncio.run(main())
421
+ except KeyboardInterrupt:
422
+ log("Stopped by user", LogLevel.INFO)
423
+ except Exception as e:
424
+ log(f"Unexpected error: {str(e)}", LogLevel.ERROR)
@@ -8,6 +8,7 @@ from util.trading_util import *
8
8
  '''
9
9
  Have a look at this for a visual explanation of how "Gradually tightened stops" works:
10
10
  https://github.com/r0bbar/siglab/blob/master/siglab_py/tests/manual/trading_util_tests.ipynb
11
+ https://norman-lm-fung.medium.com/gradually-tightened-trailing-stops-f7854bf1e02b
11
12
  '''
12
13
 
13
14
  # @unittest.skip("Skip all integration tests.")
@@ -52,6 +52,7 @@ What's 'loss_trailing'? 'loss_trailing' is essentially pnl drop from max_unreali
52
52
 
53
53
  Have a look at this for a visual explanation of how "Gradually tightened stops" works:
54
54
  https://github.com/r0bbar/siglab/blob/master/siglab_py/tests/manual/trading_util_tests.ipynb
55
+ https://norman-lm-fung.medium.com/gradually-tightened-trailing-stops-f7854bf1e02b
55
56
  '''
56
57
  def calc_eff_trailing_sl(
57
58
  tp_min_percent : float,
@@ -63,7 +64,7 @@ def calc_eff_trailing_sl(
63
64
  slope = (0 - sl_percent_trailing) / (tp_max_percent - tp_min_percent)
64
65
  effective_tp_trailing_percent = (
65
66
  slope * (pnl_percent_notional - tp_min_percent) + sl_percent_trailing
66
- if pnl_percent_notional>tp_min_percent
67
+ if pnl_percent_notional>=tp_min_percent
67
68
  else default_effective_tp_trailing_percent
68
69
  )
69
70
  return effective_tp_trailing_percent
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: siglab-py
3
- Version: 0.5.2
3
+ Version: 0.5.5
4
4
  Summary: Market data fetches, TA calculations and generic order gateway.
5
5
  Author: r0bbarh00d
6
6
  Author-email: r0bbarh00d <r0bbarh00d@gmail.com>
@@ -12,6 +12,7 @@ siglab_py/market_data_providers/deribit_options_expiry_provider.py,sha256=e9Ee8T
12
12
  siglab_py/market_data_providers/futu_candles_ta_to_csv.py,sha256=S4GXaJ7AveEh-Cm9-VhENBdlj_1CfyBTrQO7acTqfUE,10226
13
13
  siglab_py/market_data_providers/orderbooks_provider.py,sha256=olt-3LIkoyzQWfNNQRhJtKibLbkTutt_q_rCCTM7i1g,16216
14
14
  siglab_py/market_data_providers/test_provider.py,sha256=wBLCgcWjs7FGZJXWsNyn30lkOLa_cgpuvqRakMC0wbA,2221
15
+ siglab_py/market_data_providers/tg_monitor.py,sha256=wy1FqF2_FnkoBgrCUQFKm0aVrKXnwiqU-Z0B6vgjyic,21814
15
16
  siglab_py/ordergateway/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
16
17
  siglab_py/ordergateway/client.py,sha256=LvtrYirrdFOcKgTkvuqwdEN7r3nurjX320ESnk7tHE0,15095
17
18
  siglab_py/ordergateway/encrypt_keys_util.py,sha256=-qi87db8To8Yf1WS1Q_Cp2Ya7ZqgWlRqSHfNXCM7wE4,1339
@@ -23,7 +24,7 @@ siglab_py/tests/integration/market_data_util_tests.py,sha256=p-RWIJZLyj0lAdfi4QT
23
24
  siglab_py/tests/unit/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
24
25
  siglab_py/tests/unit/analytic_util_tests.py,sha256=eeusM5zkQR2QyVhT7nqF0mwHVg7vlxwtgv2JnLqwQgY,3852
25
26
  siglab_py/tests/unit/market_data_util_tests.py,sha256=A1y83itISmMJdn6wLpfwcr4tGola8wTf1D1xbelMvgw,2026
26
- siglab_py/tests/unit/trading_util_tests.py,sha256=664pnAwhdwfRI21ktPpuAxNydz6XtxBrU_Dm1rTGquQ,2883
27
+ siglab_py/tests/unit/trading_util_tests.py,sha256=hkAEZHT-8Ncbm9sG-H4WRoyryJSTCj9BsWTdDX7e4hU,2970
27
28
  siglab_py/util/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
28
29
  siglab_py/util/analytic_util.py,sha256=blFJ1kY_aSJeuzzk28vdB4nhLgmosz0L8IJaJCZy9OM,47272
29
30
  siglab_py/util/aws_util.py,sha256=KGmjHrr1rpnnxr33nXHNzTul4tvyyxl9p6gpwNv0Ygc,2557
@@ -31,8 +32,8 @@ siglab_py/util/market_data_util.py,sha256=mUXg4uaiX3b6_klgJWIEgnUQU4IUd6CwTOqKLi
31
32
  siglab_py/util/notification_util.py,sha256=vySgHjpHgwFDLW0tHSi_AGh9JBbPc25IUgvWxmjAeT8,2658
32
33
  siglab_py/util/retry_util.py,sha256=g-UU6pkPouWZZRZEqP99R2Z0lX5xzckYkzjwqqSDpVQ,922
33
34
  siglab_py/util/slack_notification_util.py,sha256=G27n-adbT3Q6oaHSMvu_Nom794rrda5PprSF-zvmzkM,1912
34
- siglab_py/util/trading_util.py,sha256=Z-sK4cTi-5rIXYVgmIMgOWlIaxcCEX1lXlCKTweg6-U,3233
35
- siglab_py-0.5.2.dist-info/METADATA,sha256=yL1RdzFxrjYNgzdB18G-XtbaM-HLC0NWeWcqXLws6Vw,979
36
- siglab_py-0.5.2.dist-info/WHEEL,sha256=yQN5g4mg4AybRjkgi-9yy4iQEFibGQmlz78Pik5Or-A,92
37
- siglab_py-0.5.2.dist-info/top_level.txt,sha256=AbD4VR9OqmMOGlMJLkAVPGQMtUPIQv0t1BF5xmcLJSk,10
38
- siglab_py-0.5.2.dist-info/RECORD,,
35
+ siglab_py/util/trading_util.py,sha256=rXiBLOJGxerK1Uz1D1GBLedunTobtDCC85d42on4TYg,3321
36
+ siglab_py-0.5.5.dist-info/METADATA,sha256=AiiTUyYs_Pd4mAgZexM9fLJ-AMW1uFfBYTDZ-jJzAQw,979
37
+ siglab_py-0.5.5.dist-info/WHEEL,sha256=yQN5g4mg4AybRjkgi-9yy4iQEFibGQmlz78Pik5Or-A,92
38
+ siglab_py-0.5.5.dist-info/top_level.txt,sha256=AbD4VR9OqmMOGlMJLkAVPGQMtUPIQv0t1BF5xmcLJSk,10
39
+ siglab_py-0.5.5.dist-info/RECORD,,