siglab-py 0.5.72__py3-none-any.whl → 0.5.73__py3-none-any.whl
This diff shows the changes between publicly released versions of the package as they appear in their respective public registries, and is provided for informational purposes only.
Potentially problematic release.
This version of siglab-py might be problematic.
- siglab_py/market_data_providers/google_monitor.py +320 -0
- siglab_py/market_data_providers/tg_monitor.py +6 -2
- siglab_py/util/market_data_util.py +6 -3
- {siglab_py-0.5.72.dist-info → siglab_py-0.5.73.dist-info}/METADATA +1 -1
- {siglab_py-0.5.72.dist-info → siglab_py-0.5.73.dist-info}/RECORD +7 -6
- {siglab_py-0.5.72.dist-info → siglab_py-0.5.73.dist-info}/WHEEL +1 -1
- {siglab_py-0.5.72.dist-info → siglab_py-0.5.73.dist-info}/top_level.txt +0 -0
siglab_py/market_data_providers/google_monitor.py (new file)
@@ -0,0 +1,320 @@
+import asyncio
+import sys
+import traceback
+import os
+import argparse
+import json
+import hashlib
+import re
+from datetime import datetime, timedelta, timezone
+import time
+import pytz
+import arrow
+from enum import Enum
+import logging
+import requests
+from typing import Dict, Optional, Set, Any, Union, List
+from redis import StrictRedis
+
+from siglab_py.util.notification_util import dispatch_notification
+
+current_filename = os.path.basename(__file__)
+
+'''
+google_monitor fetches messages from a particular query. Then:
+a. Save (and accumulate) messages to a message cache file (no duplicates) for further analysis.
+    message_cache_file: str = f"google_search_messages.json"
+
+b. If any of the keywords in message_keywords_filter matches words in the message (--message_keywords_filter):
+    - Publish to redis for strategy consumption, topic: param['mds']['topics']['google_alert']
+    - Dispatch slack alert
+    - If the script runs on Windows, play a wav file (feel free to modify it to play sounds on Ubuntu, for example)
+
+Usage:
+    set PYTHONPATH=%PYTHONPATH%;D:\dev\siglab\siglab_py
+    python google_monitor.py --apikey xxx --search_engine_id yyy --query "site:twitter.com @user_id1 @user_id2 some topic" --slack_info_url=https://hooks.slack.com/services/xxx --slack_critial_url=https://hooks.slack.com/services/xxx --slack_alert_url=https://hooks.slack.com/services/xxx
+
+alert_wav_path
+    Point it to a wav file for alert notification. It uses 'winsound', i.e. Windows only.
+    Set to None otherwise.
+
+Google API: https://console.cloud.google.com/apis/credentials?project=YOUR_PROJECT
+    name: YOUR_API_KEY_NAME
+    apikey: ?????
+
+Google Search Engine
+    To create
+        name: siglab_py_search: https://programmablesearchengine.google.com/controlpanel/create
+        <script async src="https://cse.google.com/cse.js?cx=YOUR_SEARCH_ENGINE_ID">
+        </script>
+        <div class="gcse-search"></div>
+    Then enable it: https://console.developers.google.com/apis/api/customsearch.googleapis.com/overview?project=?????
+
+launch.json for Debugging from VSCode:
+{
+    "version": "0.2.0",
+    "configurations": [
+        {
+            "name": "Python: Current File",
+            "type": "python",
+            "request": "launch",
+            "program": "${file}",
+            "console": "integratedTerminal",
+            "justMyCode": false,
+            "args" : [
+                "--apikey", "xxx",
+                "--search_engine_id", "yyy",
+                "--query", "site:twitter.com @user_id1 @user_id2 some topic",
+                "--slack_info_url", "https://hooks.slack.com/services/xxx",
+                "--slack_critial_url", "https://hooks.slack.com/services/xxx",
+                "--slack_alert_url", "https://hooks.slack.com/services/xxx",
+            ],
+        }
+    ]
+}
+'''
+
+param: Dict[str, Any] = {
+    'apikey': os.getenv('GOOGLE_APIKEY', 'xxx'),
+    'search_engine_id': os.getenv('GOOGLE_SEARCH_ENGINE_ID', 'xxx'),
+    'num_results' : 10,
+    'query' : '',
+    'alert_wav_path' : r"d:\sounds\terrible.wav",
+    "num_shouts" : 5, # How many times 'alert_wav_path' is played
+    "loop_freq_ms" : 1000*60*15, # Google allows max 100 free calls per day.
+    'current_filename' : current_filename,
+
+    'notification' : {
+        'footer' : None,
+
+        # slack webhook url's for notifications
+        'slack' : {
+            'info' : { 'webhook_url' : None },
+            'critical' : { 'webhook_url' : None },
+            'alert' : { 'webhook_url' : None },
+        }
+    },
+
+    'mds': {
+        'topics': {
+            'tg_alert': 'tg_alert'
+        },
+        'redis': {
+            'host': 'localhost',
+            'port': 6379,
+            'db': 0,
+            'ttl_ms': 1000 * 60 * 15
+        }
+    }
+}
+
+class LogLevel(Enum):
+    CRITICAL = 50
+    ERROR = 40
+    WARNING = 30
+    INFO = 20
+    DEBUG = 10
+    NOTSET = 0
+
+logging.Formatter.converter = time.gmtime
+logger: logging.Logger = logging.getLogger()
+log_level: int = logging.INFO
+logger.setLevel(log_level)
+format_str: str = '%(asctime)s %(message)s'
+formatter: logging.Formatter = logging.Formatter(format_str)
+sh: logging.StreamHandler = logging.StreamHandler()
+sh.setLevel(log_level)
+sh.setFormatter(formatter)
+logger.addHandler(sh)
+
+def log(message: str, log_level: LogLevel = LogLevel.INFO) -> None:
+    if log_level.value < LogLevel.WARNING.value:
+        logger.info(f"{datetime.now()} {message}")
+    elif log_level.value == LogLevel.WARNING.value:
+        logger.warning(f"{datetime.now()} {message}")
+    elif log_level.value == LogLevel.ERROR.value:
+        logger.error(f"{datetime.now()} {message}")
+
+def parse_args():
+    parser = argparse.ArgumentParser() # type: ignore
+    parser.add_argument("--apikey", help="API key", default=None)
+    parser.add_argument("--search_engine_id", help="Google search engine ID", default=None)
+    parser.add_argument("--num_results", help="Max number of items to fetch", default=10)
+    parser.add_argument("--query", help="Query - what are you looking for?", default=None)
+    parser.add_argument("--slack_info_url", help="Slack webhook url for INFO", default=None)
+    parser.add_argument("--slack_critial_url", help="Slack webhook url for CRITICAL", default=None)
+    parser.add_argument("--slack_alert_url", help="Slack webhook url for ALERT", default=None)
+
+    args = parser.parse_args()
+
+    param['apikey'] = args.apikey
+    param['search_engine_id'] = args.search_engine_id
+    param['num_results'] = args.num_results
+    param['query'] = args.query
+
+    param['notification']['slack']['info']['webhook_url'] = args.slack_info_url
+    param['notification']['slack']['critical']['webhook_url'] = args.slack_critial_url
+    param['notification']['slack']['alert']['webhook_url'] = args.slack_alert_url
+
+    param['notification']['footer'] = f"From {param['current_filename']}"
+
+    print(f"Startup args: {args}") # Don't use logger, it's not set up yet.
+    print(f"param: {json.dumps(param, indent=2)}")
+
+def init_redis_client() -> StrictRedis:
+    redis_client : StrictRedis = StrictRedis(
+        host = param['mds']['redis']['host'],
+        port = param['mds']['redis']['port'],
+        db = 0,
+        ssl = False
+    )
+    try:
+        redis_client.keys()
+    except ConnectionError as redis_conn_error:
+        err_msg = f"Failed to connect to redis: {param['mds']['redis']['host']}, port: {param['mds']['redis']['port']}"
+        log(f"Failed to init redis connection. Will skip publishes to redis. {err_msg}")
+        redis_client = None # type: ignore
+
+    return redis_client
+
+def search_google_custom(query, api_key, search_engine_id, num_results=10):
+    url = 'https://www.googleapis.com/customsearch/v1'
+    params = {
+        'key': api_key,
+        'cx': search_engine_id,
+        'q': query,
+        'num': num_results,
+        'sort': 'date',
+        'dateRestrict': 'd1' # Restrict to most recent (adjust as needed: d1=day, m1=month, etc.)
+    }
+
+    response = requests.get(url, params=params)
+
+    if response.status_code == 200:
+        return response.json()
+    else:
+        log(f"Query error: {response.status_code} - {response.text}")
+        return None
+
+async def main() -> None:
+    parse_args()
+
+    message_cache_file: str = f"google_search_messages.json"
+    log(f"message_cache_file: {message_cache_file}")
+
+    notification_params : Dict[str, Any] = param['notification']
+
+    processed_messages : List[Dict[str, Any]] = []
+    seen_hashes : Set[str] = set()
+    if os.path.exists(message_cache_file):
+        with open(message_cache_file, 'r', encoding='utf-8') as f:
+            lines = f.readlines()
+            for line in lines:
+                message_data = json.loads(line)
+                message_hash: str = hashlib.sha256(message_data['message'].encode('utf-8')).hexdigest()
+
+                message_data['datetime'] = pytz.UTC.localize(arrow.get(message_data['datetime']).datetime.replace(tzinfo=None))
+
+                if message_hash not in seen_hashes:
+                    seen_hashes.add(message_hash)
+                    processed_messages.append(message_data)
+
+    processed_messages = sorted(processed_messages, key=lambda m: m['datetime'])
+
+    try:
+        redis_client: Optional[StrictRedis] = init_redis_client()
+    except Exception as redis_err:
+        redis_client = None
+        log(f"Failed to connect to redis. Still run but not publishing to it. {redis_err}")
+
+    while True:
+        try:
+            results = search_google_custom(param['query'], param['apikey'], param['search_engine_id'], param['num_results'])
+
+            if results:
+                if 'items' in results:
+                    for item in results['items']:
+                        title = item.get('title', 'No title')
+                        snippet = item.get('snippet', 'No snippet')
+                        link = item.get('link', 'No link')
+                        published_date = item.get('pagemap', {}).get('metatags', [{}])[0].get('article:published_time', 'No date')
+
+                        dt_message = datetime.now()
+                        pattern = r'^\d+\s*(?:days?|day?|hours?|hour?|minutes?|minute?|seconds?|second?|h|m|s)\s*(?:ago)?.*?([A-Za-z]+\s+\d+,\s+\d{4},\s+\d+:\d+\s+[AP]M\s+ET)'
+                        match = re.match(pattern, snippet)
+                        if published_date == 'No date' and match:
+                            published_date = match.group(1)
+                            dt_message = datetime.strptime(published_date, '%b %d, %Y, %I:%M %p ET')
+
+                        snippet = re.sub(pattern, '', snippet).strip()
+
+                        timestamp_ms = int(dt_message.timestamp() * 1000)
+                        message_data: Dict[str, Any] = {
+                            "timestamp_ms": timestamp_ms,
+                            "datetime": dt_message.isoformat(), # Always in UTC
+                            "title" : title,
+                            "message": snippet,
+                            "url" : link
+                        }
+                        json_str: str = json.dumps(message_data, ensure_ascii=False, sort_keys=True)
+                        message_hash: str = hashlib.sha256(snippet.encode('utf-8')).hexdigest()
+                        if (message_hash not in seen_hashes):
+                            seen_hashes.add(message_hash)
+                            processed_messages.append(message_data)
+
+                            log(f"{message_data}")
+
+                            dispatch_notification(title=f"{param['current_filename']} Incoming! {title}", message=message_data, footer=param['notification']['footer'], params=notification_params, log_level=LogLevel.CRITICAL, logger=logger) # type: ignore
+
+                            with open(message_cache_file, 'a', encoding='utf-8') as f:
+                                json.dump(message_data, f, ensure_ascii=False)
+                                f.write('\n')
+
+                            if param['alert_wav_path'] and sys.platform == 'win32':
+                                import winsound
+                                for _ in range(param['num_shouts']):
+                                    winsound.PlaySound(param['alert_wav_path'], winsound.SND_FILENAME)
+
+                            if redis_client:
+                                try:
+                                    publish_topic = f"google_search"
+                                    redis_client.publish(publish_topic, json_str)
+                                    redis_client.setex(message_hash, param['mds']['redis']['ttl_ms'] // 1000, json_str)
+                                    log(f"Published message {json_str} to Redis topic {publish_topic}", LogLevel.INFO)
+                                except Exception as e:
+                                    log(f"Failed to publish to Redis: {str(e)}", LogLevel.ERROR)
+
+            await asyncio.sleep(int(param['loop_freq_ms'] / 1000))
+
+            if processed_messages:
+                oldest_message: Dict[str, Any] = min(processed_messages, key=lambda x: x['timestamp_ms'])
+                newest_message: Dict[str, Any] = max(processed_messages, key=lambda x: x['timestamp_ms'])
+                log(
+                    json.dumps(
+                        {
+                            'num_messages': len(processed_messages),
+                            'oldest': {
+                                'timestamp_ms': oldest_message['timestamp_ms'],
+                                'datetime': datetime.fromtimestamp(int(oldest_message['timestamp_ms']/1000),tz=timezone.utc).isoformat()
+                            },
+                            'latest': {
+                                'timestamp_ms': newest_message['timestamp_ms'],
+                                'datetime': datetime.fromtimestamp(int(newest_message['timestamp_ms']/1000),tz=timezone.utc).isoformat()
+                            }
+                        }, indent=2
+                    ),
+                    LogLevel.INFO
+                )
+
+        except Exception as e:
+            log(f"Oops {str(e)} {str(sys.exc_info()[0])} {str(sys.exc_info()[1])} {traceback.format_exc()}", LogLevel.ERROR)
+        finally:
+            await asyncio.sleep(int(param['loop_freq_ms'] / 1000))
+
+if __name__ == '__main__':
+    try:
+        asyncio.run(main())
+    except KeyboardInterrupt:
+        log("Stopped by user", LogLevel.INFO)
+    except Exception as e:
+        log(f"Unexpected error: {str(e)}", LogLevel.ERROR)
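Each new result above is published to Redis pub/sub in addition to the Slack notification. Below is a minimal consumer sketch, not part of the package: the topic name 'google_search', the host/port/db values and the payload fields simply mirror what the script hard-codes and publishes.

    # Minimal consumer sketch (illustrative only): subscribe to the topic
    # google_monitor.py publishes to and print each incoming alert.
    import json
    from redis import StrictRedis

    redis_client = StrictRedis(host='localhost', port=6379, db=0)   # same defaults as param['mds']['redis']
    pubsub = redis_client.pubsub()
    pubsub.subscribe('google_search')                               # topic hard-coded in the monitor's publish loop

    for raw in pubsub.listen():
        if raw['type'] != 'message':
            continue                                                # skip the subscribe confirmation
        message_data = json.loads(raw['data'])
        print(message_data['datetime'], message_data['title'], message_data['url'])

Because the monitor also stores each payload keyed by its hash via setex with a 15-minute TTL, a consumer that starts late can still recover recent alerts from those keys instead of the pub/sub stream.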
siglab_py/market_data_providers/tg_monitor.py
@@ -254,7 +254,11 @@ async def main() -> None:
     processed_messages = sorted(processed_messages, key=lambda m: m['datetime'])
     last_message_date = processed_messages[-1]['datetime']

-
+    try:
+        redis_client: Optional[StrictRedis] = init_redis_client()
+    except Exception as redis_err:
+        redis_client = None
+        log(f"Failed to connect to redis. Still run but not publishing to it. {redis_err}")

     start_date: Optional[datetime] = None
     if param.get('start_date'):
@@ -362,7 +366,7 @@ async def main() -> None:
             processed_messages.append(message_data)

             if message_date>tm1:
-                dispatch_notification(title=f"{param['current_filename']} {param['channel_username']}
+                dispatch_notification(title=f"{param['current_filename']} {param['channel_username']} Incoming!", message=message_data, footer=param['notification']['footer'], params=notification_params, log_level=LogLevel.CRITICAL, logger=logger) # type: ignore

             with open(message_cache_file, 'a', encoding='utf-8') as f:
                 json.dump(message_data, f, ensure_ascii=False)
siglab_py/util/market_data_util.py
@@ -83,7 +83,8 @@ async def async_instantiate_exchange(
     secret : str,
     passphrase : str,
     default_type : str,
-    rate_limit_ms : float = 100
+    rate_limit_ms : float = 100,
+    verbose : bool = False
 ) -> Union[AnyExchange, None]:
     exchange : Union[AnyExchange, None] = None
     exchange_name : str = gateway_id.split('_')[0]
@@ -98,7 +99,8 @@ async def async_instantiate_exchange(
         'rateLimit' : rate_limit_ms,
         'options' : {
             'defaultType' : default_type
-        }
+        },
+        'verbose': verbose
     }

     if exchange_name=='binance':
@@ -156,7 +158,8 @@ async def async_instantiate_exchange(
                 "walletAddress" : api_key,
                 "privateKey" : secret,
                 'enableRateLimit' : True,
-                'rateLimit' : rate_limit_ms
+                'rateLimit' : rate_limit_ms,
+                'verbose': verbose
             }
         ) # type: ignore
     else:
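The new verbose flag on async_instantiate_exchange is simply forwarded into the exchange constructor config. Assuming these branches construct ccxt exchanges (the binance branch and the enableRateLimit/rateLimit keys suggest as much), the effect is roughly the sketch below; the credential values are placeholders.

    # Sketch of the config that async_instantiate_exchange now builds when verbose=True
    # (assuming a ccxt exchange; values are placeholders, not real credentials).
    import ccxt.async_support as ccxt

    exchange = ccxt.binance({
        'apiKey': 'xxx',
        'secret': 'yyy',
        'enableRateLimit': True,
        'rateLimit': 100,                       # rate_limit_ms
        'options': {'defaultType': 'spot'},     # default_type
        'verbose': True,                        # new in 0.5.73
    })

In ccxt, 'verbose': True makes the client dump each raw HTTP request and response, which is mainly useful when debugging gateway connectivity.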
{siglab_py-0.5.72.dist-info → siglab_py-0.5.73.dist-info}/RECORD
@@ -10,9 +10,10 @@ siglab_py/market_data_providers/candles_ta_provider.py,sha256=uiAhbEZZdTF-YulBHp
 siglab_py/market_data_providers/ccxt_candles_ta_to_csv.py,sha256=DHj51QTbkCmEd9RFNVhWWpsSPz1aLd6zTLqkUUbEkK0,11158
 siglab_py/market_data_providers/deribit_options_expiry_provider.py,sha256=e9Ee8TmC8pXaid8-jouSLKIpuW6_JBBgwRTieI665yQ,8684
 siglab_py/market_data_providers/futu_candles_ta_to_csv.py,sha256=SCWlI_mOuErpGP8Kxh5WKEoff9cqqxO19oLFLd04bTs,10964
+siglab_py/market_data_providers/google_monitor.py,sha256=B08Aj1urL4M9hVUfjubVwTsFhfsj5-eFaf36lYqZ8-o,14028
 siglab_py/market_data_providers/orderbooks_provider.py,sha256=olt-3LIkoyzQWfNNQRhJtKibLbkTutt_q_rCCTM7i1g,16216
 siglab_py/market_data_providers/test_provider.py,sha256=wBLCgcWjs7FGZJXWsNyn30lkOLa_cgpuvqRakMC0wbA,2221
-siglab_py/market_data_providers/tg_monitor.py,sha256=
+siglab_py/market_data_providers/tg_monitor.py,sha256=LY4oRm5qQ_XiuLk0RMVDc0Vdsi6CKE6O9hgeGm3VXBM,21995
 siglab_py/ordergateway/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 siglab_py/ordergateway/client.py,sha256=LvtrYirrdFOcKgTkvuqwdEN7r3nurjX320ESnk7tHE0,15095
 siglab_py/ordergateway/encrypt_keys_util.py,sha256=U_M-jPrPYOTO_sU0bMVkO5ruNXge5vek8yUGa8jaE-g,1349
@@ -29,13 +30,13 @@ siglab_py/tests/unit/trading_util_tests.py,sha256=LiflZrduWXyLMbpSFQCaydA7jdJx3v
 siglab_py/util/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 siglab_py/util/analytic_util.py,sha256=ywp-VI8UlmoYVej2SaJMrOyheFwyh9KVjsnfw55dpMU,63785
 siglab_py/util/aws_util.py,sha256=KGmjHrr1rpnnxr33nXHNzTul4tvyyxl9p6gpwNv0Ygc,2557
-siglab_py/util/market_data_util.py,sha256=
+siglab_py/util/market_data_util.py,sha256=udu7EN8wsESznoA68HBBKlMqZdv66FkQaRCketoFhfo,33165
 siglab_py/util/notification_util.py,sha256=tNZMUkkjz4q1CKqcQq62oEmZgHgNIwz2Iw9J22V22Zw,2668
 siglab_py/util/retry_util.py,sha256=g-UU6pkPouWZZRZEqP99R2Z0lX5xzckYkzjwqqSDpVQ,922
 siglab_py/util/simple_math.py,sha256=F7vGj0O2Y9EAGcMFR6SN1tTjBWO_a7YZeiTzk3eHaVI,8518
 siglab_py/util/slack_notification_util.py,sha256=G27n-adbT3Q6oaHSMvu_Nom794rrda5PprSF-zvmzkM,1912
 siglab_py/util/trading_util.py,sha256=dlIOzoMGnddLSFODcJ61EBH1Aeruq4IT2MsxIdFkV9I,5252
-siglab_py-0.5.
-siglab_py-0.5.
-siglab_py-0.5.
-siglab_py-0.5.
+siglab_py-0.5.73.dist-info/METADATA,sha256=HirdcX03gRL8bt7NPykg2WqZSA1vsi5Q3GXDGR7UR30,829
+siglab_py-0.5.73.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+siglab_py-0.5.73.dist-info/top_level.txt,sha256=AbD4VR9OqmMOGlMJLkAVPGQMtUPIQv0t1BF5xmcLJSk,10
+siglab_py-0.5.73.dist-info/RECORD,,

{siglab_py-0.5.72.dist-info → siglab_py-0.5.73.dist-info}/top_level.txt: File without changes