siglab-py 0.5.72__tar.gz → 0.5.78__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of siglab-py might be problematic. Click here for more details.

Files changed (46) hide show
  1. {siglab_py-0.5.72 → siglab_py-0.5.78}/PKG-INFO +1 -1
  2. {siglab_py-0.5.72 → siglab_py-0.5.78}/pyproject.toml +1 -1
  3. {siglab_py-0.5.72 → siglab_py-0.5.78}/setup.cfg +1 -1
  4. siglab_py-0.5.78/siglab_py/market_data_providers/google_monitor.py +320 -0
  5. {siglab_py-0.5.72 → siglab_py-0.5.78}/siglab_py/market_data_providers/tg_monitor.py +6 -2
  6. {siglab_py-0.5.72 → siglab_py-0.5.78}/siglab_py/tests/unit/analytic_util_tests.py +1 -1
  7. {siglab_py-0.5.72 → siglab_py-0.5.78}/siglab_py/util/analytic_util.py +30 -1
  8. {siglab_py-0.5.72 → siglab_py-0.5.78}/siglab_py/util/market_data_util.py +6 -3
  9. {siglab_py-0.5.72 → siglab_py-0.5.78}/siglab_py.egg-info/PKG-INFO +1 -1
  10. {siglab_py-0.5.72 → siglab_py-0.5.78}/siglab_py.egg-info/SOURCES.txt +1 -0
  11. {siglab_py-0.5.72 → siglab_py-0.5.78}/siglab_py/__init__.py +0 -0
  12. {siglab_py-0.5.72 → siglab_py-0.5.78}/siglab_py/constants.py +0 -0
  13. {siglab_py-0.5.72 → siglab_py-0.5.78}/siglab_py/exchanges/__init__.py +0 -0
  14. {siglab_py-0.5.72 → siglab_py-0.5.78}/siglab_py/exchanges/any_exchange.py +0 -0
  15. {siglab_py-0.5.72 → siglab_py-0.5.78}/siglab_py/exchanges/futubull.py +0 -0
  16. {siglab_py-0.5.72 → siglab_py-0.5.78}/siglab_py/market_data_providers/__init__.py +0 -0
  17. {siglab_py-0.5.72 → siglab_py-0.5.78}/siglab_py/market_data_providers/aggregated_orderbook_provider.py +0 -0
  18. {siglab_py-0.5.72 → siglab_py-0.5.78}/siglab_py/market_data_providers/candles_provider.py +0 -0
  19. {siglab_py-0.5.72 → siglab_py-0.5.78}/siglab_py/market_data_providers/candles_ta_provider.py +0 -0
  20. {siglab_py-0.5.72 → siglab_py-0.5.78}/siglab_py/market_data_providers/ccxt_candles_ta_to_csv.py +0 -0
  21. {siglab_py-0.5.72 → siglab_py-0.5.78}/siglab_py/market_data_providers/deribit_options_expiry_provider.py +0 -0
  22. {siglab_py-0.5.72 → siglab_py-0.5.78}/siglab_py/market_data_providers/futu_candles_ta_to_csv.py +0 -0
  23. {siglab_py-0.5.72 → siglab_py-0.5.78}/siglab_py/market_data_providers/orderbooks_provider.py +0 -0
  24. {siglab_py-0.5.72 → siglab_py-0.5.78}/siglab_py/market_data_providers/test_provider.py +0 -0
  25. {siglab_py-0.5.72 → siglab_py-0.5.78}/siglab_py/ordergateway/__init__.py +0 -0
  26. {siglab_py-0.5.72 → siglab_py-0.5.78}/siglab_py/ordergateway/client.py +0 -0
  27. {siglab_py-0.5.72 → siglab_py-0.5.78}/siglab_py/ordergateway/encrypt_keys_util.py +0 -0
  28. {siglab_py-0.5.72 → siglab_py-0.5.78}/siglab_py/ordergateway/gateway.py +0 -0
  29. {siglab_py-0.5.72 → siglab_py-0.5.78}/siglab_py/ordergateway/test_ordergateway.py +0 -0
  30. {siglab_py-0.5.72 → siglab_py-0.5.78}/siglab_py/tests/__init__.py +0 -0
  31. {siglab_py-0.5.72 → siglab_py-0.5.78}/siglab_py/tests/integration/__init__.py +0 -0
  32. {siglab_py-0.5.72 → siglab_py-0.5.78}/siglab_py/tests/integration/market_data_util_tests.py +0 -0
  33. {siglab_py-0.5.72 → siglab_py-0.5.78}/siglab_py/tests/unit/__init__.py +0 -0
  34. {siglab_py-0.5.72 → siglab_py-0.5.78}/siglab_py/tests/unit/market_data_util_tests.py +0 -0
  35. {siglab_py-0.5.72 → siglab_py-0.5.78}/siglab_py/tests/unit/simple_math_tests.py +0 -0
  36. {siglab_py-0.5.72 → siglab_py-0.5.78}/siglab_py/tests/unit/trading_util_tests.py +0 -0
  37. {siglab_py-0.5.72 → siglab_py-0.5.78}/siglab_py/util/__init__.py +0 -0
  38. {siglab_py-0.5.72 → siglab_py-0.5.78}/siglab_py/util/aws_util.py +0 -0
  39. {siglab_py-0.5.72 → siglab_py-0.5.78}/siglab_py/util/notification_util.py +0 -0
  40. {siglab_py-0.5.72 → siglab_py-0.5.78}/siglab_py/util/retry_util.py +0 -0
  41. {siglab_py-0.5.72 → siglab_py-0.5.78}/siglab_py/util/simple_math.py +0 -0
  42. {siglab_py-0.5.72 → siglab_py-0.5.78}/siglab_py/util/slack_notification_util.py +0 -0
  43. {siglab_py-0.5.72 → siglab_py-0.5.78}/siglab_py/util/trading_util.py +0 -0
  44. {siglab_py-0.5.72 → siglab_py-0.5.78}/siglab_py.egg-info/dependency_links.txt +0 -0
  45. {siglab_py-0.5.72 → siglab_py-0.5.78}/siglab_py.egg-info/requires.txt +0 -0
  46. {siglab_py-0.5.72 → siglab_py-0.5.78}/siglab_py.egg-info/top_level.txt +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: siglab_py
3
- Version: 0.5.72
3
+ Version: 0.5.78
4
4
  Summary: Market data fetches, TA calculations and generic order gateway.
5
5
  Author: r0bbarh00d
6
6
  Author-email: r0bbarh00d <r0bbarh00d@gmail.com>
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
4
4
 
5
5
  [project]
6
6
  name = "siglab_py"
7
- version = "0.5.72"
7
+ version = "0.5.78"
8
8
  description = "Market data fetches, TA calculations and generic order gateway."
9
9
  authors = [{name = "r0bbarh00d", email = "r0bbarh00d@gmail.com"}]
10
10
  license = {text = "MIT"}
@@ -1,6 +1,6 @@
1
1
  [metadata]
2
2
  name = siglab_py
3
- version = 0.5.72
3
+ version = 0.5.78
4
4
  description = Market data fetches, TA calculations and generic order gateway.
5
5
  author = r0bbarh00d
6
6
  author_email = r0bbarh00d@gmail.com
@@ -0,0 +1,320 @@
1
+ import asyncio
2
+ import sys
3
+ import traceback
4
+ import os
5
+ import argparse
6
+ import json
7
+ import hashlib
8
+ import re
9
+ from datetime import datetime, timedelta, timezone
10
+ import time
11
+ import pytz
12
+ import arrow
13
+ from enum import Enum
14
+ import logging
15
+ import requests
16
+ from typing import Dict, Optional, Set, Any, Union, List
17
+ from redis import StrictRedis
18
+
19
+ from siglab_py.util.notification_util import dispatch_notification
20
+
21
# Base name of this script; used in notification titles and footers.
current_filename = os.path.basename(__file__)
22
+
23
+ '''
24
+ google_monitor fetches messages from particular query. Then:
25
+ a. Save (and accumulate) messages to message cache file (No duplicates) for further analysis.
26
+ message_cache_file: str = f"google_search_messages.json"
27
+
28
+ b. If any of keywords in message_keywords_filter matches words in message (--message_keywords_filter):
29
+ - Publish to redis for strategy consumption, topic: "google_search" (note: param['mds']['topics'] still carries a leftover 'tg_alert' key that is never used)
30
+ - Dispatch slack alert
31
+ - If the script runs on Windows, play a wav file (feel free to modify this to play sounds on Ubuntu, for example)
32
+
33
+ Usage:
34
+ set PYTHONPATH=%PYTHONPATH%;D:\dev\siglab\siglab_py
35
+ python google_monitor.py --apikey xxx --search_engine_id yyy --query "site:twitter.com @user_id1 @user_id2 some topic" --slack_info_url=https://hooks.slack.com/services/xxx --slack_critial_url=https://hooks.slack.com/services/xxx --slack_alert_url=https://hooks.slack.com/services/xxx
36
+
37
+ alert_wav_path
38
+ Point it to wav file for alert notification. It's using 'winsound', i.e. Windows only.
39
+ Set to None otherwise.
40
+
41
+ Google API: https://console.cloud.google.com/apis/credentials?project=YOUR_PROJECT
42
+ name: YOUR_API_KEY_NAME
43
+ apikey: ?????
44
+
45
+ Google Search Engine
46
+ To create
47
+ name: siglab_py_search: https://programmablesearchengine.google.com/controlpanel/create
48
+ <script async src="https://cse.google.com/cse.js?cx=YOUR_SEARCH_ENGINE_ID">
49
+ </script>
50
+ <div class="gcse-search"></div>
51
+ Then enable it: https://console.developers.google.com/apis/api/customsearch.googleapis.com/overview?project=?????
52
+
53
+ launch.json for Debugging from VSCode:
54
+ {
55
+ "version": "0.2.0",
56
+ "configurations": [
57
+ {
58
+ "name": "Python: Current File",
59
+ "type": "python",
60
+ "request": "launch",
61
+ "program": "${file}",
62
+ "console": "integratedTerminal",
63
+ "justMyCode": false,
64
+ "args" : [
65
+ "--apikey", "xxx",
66
+ "--search_engine_id", "yyy",
67
+ "--query", "site:twitter.com @user_id1 @user_id2 some topic",
68
+ "--slack_info_url", "https://hooks.slack.com/services/xxx",
69
+ "--slack_critial_url", "https://hooks.slack.com/services/xxx",
70
+ "--slack_alert_url", "https://hooks.slack.com/services/xxx",
71
+ ],
72
+ }
73
+ ]
74
+ }
75
+ '''
76
+
77
# Runtime configuration. API credentials default to environment variables and
# are typically overridden by CLI args in parse_args().
param: Dict[str, Any] = {
    'apikey': os.getenv('GOOGLE_APIKEY', 'xxx'),
    'search_engine_id': os.getenv('GOOGLE_SEARCH_ENGINE_ID', 'xxx'),
    'num_results' : 10,             # max items per Google Custom Search call
    'query' : '',                   # set from --query
    'alert_wav_path' : r"d:\sounds\terrible.wav",   # Windows-only alert sound; set to None to disable
    "num_shouts" : 5, # How many times 'alert_wav_path' is played
    "loop_freq_ms" : 1000*60*15, # Google allow max 100 calls per day free.
    'current_filename' : current_filename,

    'notification' : {
        'footer' : None,            # filled in by parse_args()

        # slack webhook url's for notifications
        'slack' : {
            'info' : { 'webhook_url' : None },
            'critical' : { 'webhook_url' : None },
            'alert' : { 'webhook_url' : None },
        }
    },

    'mds': {
        'topics': {
            # NOTE(review): looks copied from tg_monitor — this key is never
            # read; the redis publish below uses the literal topic
            # "google_search". Confirm whether this should be 'google_alert'.
            'tg_alert': 'tg_alert'
        },
        'redis': {
            'host': 'localhost',
            'port': 6379,
            'db': 0,
            'ttl_ms': 1000 * 60 * 15    # per-message dedup key TTL in redis
        }
    }
}
110
+
111
class LogLevel(Enum):
    """Log-severity levels mirroring the stdlib ``logging`` numeric values."""
    CRITICAL = 50
    ERROR = 40
    WARNING = 30
    INFO = 20
    DEBUG = 10
    NOTSET = 0
118
+
119
# Root-logger setup: UTC timestamps, INFO level, plain "<time> <message>"
# lines to stderr via a single stream handler.
logging.Formatter.converter = time.gmtime

log_level: int = logging.INFO
format_str: str = '%(asctime)s %(message)s'
formatter: logging.Formatter = logging.Formatter(format_str)

sh: logging.StreamHandler = logging.StreamHandler()
sh.setFormatter(formatter)
sh.setLevel(log_level)

logger: logging.Logger = logging.getLogger()
logger.setLevel(log_level)
logger.addHandler(sh)
129
+
130
def log(message: str, log_level: LogLevel = LogLevel.INFO) -> None:
    """Emit *message* on the module logger at the given severity.

    Fix vs original: the if/elif chain had no branch for CRITICAL (50),
    so CRITICAL messages were silently dropped. Anything at or above
    ERROR is now routed to ``logger.error``.
    """
    stamped = f"{datetime.now()} {message}"
    if log_level.value >= LogLevel.ERROR.value:
        # Covers both ERROR and CRITICAL.
        logger.error(stamped)
    elif log_level.value == LogLevel.WARNING.value:
        logger.warning(stamped)
    else:
        logger.info(stamped)
137
+
138
def parse_args():
    """Parse CLI arguments into the module-level ``param`` dict.

    Fixes vs original:
    - ``--num_results`` is coerced with ``type=int`` (argparse passes CLI
      values through as ``str`` otherwise, breaking the 'num' API param).
    - apikey / search_engine_id / query only override ``param`` when actually
      supplied, so the GOOGLE_APIKEY / GOOGLE_SEARCH_ENGINE_ID environment
      defaults survive when the flags are omitted.
    - Removed the nested ``print(print(...))`` which printed "param: None".
    """
    parser = argparse.ArgumentParser() # type: ignore
    parser.add_argument("--apikey", help="API key", default=None)
    parser.add_argument("--search_engine_id", help="Google search engine ID", default=None)
    parser.add_argument("--num_results", help="Max number items to fetch", type=int, default=10)
    parser.add_argument("--query", help="Query - what are you looking for?", default=None)
    parser.add_argument("--slack_info_url", help="Slack webhook url for INFO", default=None)
    parser.add_argument("--slack_critial_url", help="Slack webhook url for CRITICAL", default=None)
    parser.add_argument("--slack_alert_url", help="Slack webhook url for ALERT", default=None)

    args = parser.parse_args()

    if args.apikey is not None:
        param['apikey'] = args.apikey
    if args.search_engine_id is not None:
        param['search_engine_id'] = args.search_engine_id
    param['num_results'] = args.num_results
    if args.query is not None:
        param['query'] = args.query

    param['notification']['slack']['info']['webhook_url'] = args.slack_info_url
    param['notification']['slack']['critical']['webhook_url'] = args.slack_critial_url
    param['notification']['slack']['alert']['webhook_url'] = args.slack_alert_url

    param['notification']['footer'] = f"From {param['current_filename']}"

    print(f"Startup args: {args}") # Dont use logger, not yet setup yet.
    print(f"param: {json.dumps(param, indent=2)}")
163
+
164
def init_redis_client() -> Optional[StrictRedis]:
    """Connect to redis per ``param['mds']['redis']``; return None if unreachable.

    Fixes vs original:
    - redis-py raises ``redis.exceptions.ConnectionError``, which does NOT
      subclass the builtin ``ConnectionError`` the original caught, so
      connection failures escaped this function; a broad ``except Exception``
      now implements the intended "return None on any failure" contract.
    - The db index comes from param instead of being hard-coded to 0.
    - Return annotation is Optional to match the None fallback.
    """
    redis_client: Optional[StrictRedis] = StrictRedis(
        host = param['mds']['redis']['host'],
        port = param['mds']['redis']['port'],
        db = param['mds']['redis']['db'],
        ssl = False
    )
    try:
        redis_client.keys()  # cheap liveness probe
    except Exception:
        err_msg = f"Failed to connect to redis: {param['mds']['redis']['host']}, port: {param['mds']['redis']['port']}"
        log(f"Failed to init redis connection. Will skip publishes to redis. {err_msg}")
        redis_client = None

    return redis_client
179
+
180
def search_google_custom(query, api_key, search_engine_id, num_results=10, timeout_s: float = 30.0):
    """Run one Google Custom Search API call, most-recent-first.

    Args:
        query: search string (e.g. "site:twitter.com @user some topic").
        api_key: Google API key.
        search_engine_id: programmable search engine 'cx' id.
        num_results: max items to return.
        timeout_s: HTTP timeout in seconds (new, backward-compatible —
            the original ``requests.get`` had no timeout and could hang
            the polling loop forever).

    Returns:
        Parsed JSON response dict on HTTP 200, otherwise None (error logged).
    """
    url = 'https://www.googleapis.com/customsearch/v1'
    params = {
        'key': api_key,
        'cx': search_engine_id,
        'q': query,
        'num': num_results,
        'sort': 'date',
        'dateRestrict': 'd1' # Restrict to most recent (adjust as needed: d1=day, m1=month, etc.)
    }

    response = requests.get(url, params=params, timeout=timeout_s)

    if response.status_code == 200:
        return response.json()
    else:
        log(f"Query error: {response.status_code} - {response.text}")
        return None
198
+
199
async def main() -> None:
    """Poll Google Custom Search on a fixed interval and fan out new hits.

    Per iteration:
      - query Google Custom Search with param['query'];
      - for each previously-unseen result (deduped by sha256 of the snippet):
        append it to the JSONL message cache, send a slack notification,
        optionally play an alert wav (Windows only), and publish to redis;
      - log a summary of the accumulated cache.

    Fix vs original: the loop slept both inside the try body and in the
    finally block, pausing twice per iteration; the sleep now lives only in
    finally so the cadence matches param['loop_freq_ms'].
    """
    parse_args()

    message_cache_file: str = f"google_search_messages.json"
    log(f"message_cache_file: {message_cache_file}")

    notification_params : Dict[str, Any] = param['notification']

    # Reload previously-seen messages so restarts don't re-alert on old hits.
    processed_messages : List[Dict[str, Any]] = []
    seen_hashes : Set[str] = set()
    if os.path.exists(message_cache_file):
        with open(message_cache_file, 'r', encoding='utf-8') as f:
            lines = f.readlines()
        for line in lines:
            message_data = json.loads(line)
            message_hash: str = hashlib.sha256(message_data['message'].encode('utf-8')).hexdigest()

            # Cached ISO strings -> tz-aware UTC datetimes (used for sorting below).
            message_data['datetime'] = pytz.UTC.localize(arrow.get(message_data['datetime']).datetime.replace(tzinfo=None))

            if message_hash not in seen_hashes:
                seen_hashes.add(message_hash)
                processed_messages.append(message_data)

    processed_messages = sorted(processed_messages, key=lambda m: m['datetime'])

    try:
        redis_client: Optional[StrictRedis] = init_redis_client()
    except Exception as redis_err:
        redis_client = None
        log(f"Failed to connect to redis. Still run but not publishing to it. {redis_err}")

    while True:
        try:
            results = search_google_custom(param['query'], param['apikey'], param['search_engine_id'], param['num_results'])

            if results and 'items' in results:
                for item in results['items']:
                    title = item.get('title', 'No title')
                    snippet = item.get('snippet', 'No snippet')
                    link = item.get('link', 'No link')
                    published_date = item.get('pagemap', {}).get('metatags', [{}])[0].get('article:published_time', 'No date')

                    # Google often prefixes snippets with "<n> hours ago ... <Mon d, yyyy, h:mm AM ET>";
                    # recover that embedded timestamp when metatags carry no date.
                    dt_message = datetime.now()
                    pattern = r'^\d+\s*(?:days?|day?|hours?|hour?|minutes?|minute?|seconds?|second?|h|m|s)\s*(?:ago)?.*?([A-Za-z]+\s+\d+,\s+\d{4},\s+\d+:\d+\s+[AP]M\s+ET)'
                    match = re.match(pattern, snippet)
                    if published_date == 'No date' and match:
                        published_date = match.group(1)
                        dt_message = datetime.strptime(published_date, '%b %d, %Y, %I:%M %p ET')

                    snippet = re.sub(pattern, '', snippet).strip()

                    timestamp_ms = int(dt_message.timestamp() * 1000)
                    message_data: Dict[str, Any] = {
                        "timestamp_ms": timestamp_ms,
                        # NOTE(review): dt_message is naive local time here
                        # (datetime.now() / strptime), not guaranteed UTC as
                        # the original comment claimed — confirm.
                        "datetime": dt_message.isoformat(),
                        "title" : title,
                        "message": snippet,
                        "url" : link
                    }
                    json_str: str = json.dumps(message_data, ensure_ascii=False, sort_keys=True)
                    message_hash = hashlib.sha256(snippet.encode('utf-8')).hexdigest()
                    if message_hash in seen_hashes:
                        continue  # already alerted on this snippet
                    seen_hashes.add(message_hash)
                    processed_messages.append(message_data)

                    log(f"{message_data}")

                    dispatch_notification(title=f"{param['current_filename']} Incoming! {title}", message=message_data, footer=param['notification']['footer'], params=notification_params, log_level=LogLevel.CRITICAL, logger=logger) # type: ignore

                    # Append-only JSONL cache: one message per line.
                    with open(message_cache_file, 'a', encoding='utf-8') as f:
                        json.dump(message_data, f, ensure_ascii=False)
                        f.write('\n')

                    if param['alert_wav_path'] and sys.platform == 'win32':
                        import winsound
                        for _ in range(param['num_shouts']):
                            winsound.PlaySound(param['alert_wav_path'], winsound.SND_FILENAME)

                    if redis_client:
                        try:
                            publish_topic = f"google_search"
                            redis_client.publish(publish_topic, json_str)
                            # Dedup key with TTL so other consumers can also skip repeats.
                            redis_client.setex(message_hash, param['mds']['redis']['ttl_ms'] // 1000, json_str)
                            log(f"Published message {json_str} to Redis topic {publish_topic}", LogLevel.INFO)
                        except Exception as e:
                            log(f"Failed to publish to Redis: {str(e)}", LogLevel.ERROR)

            # Per-iteration summary of the accumulated cache.
            if processed_messages:
                oldest_message: Dict[str, Any] = min(processed_messages, key=lambda x: x['timestamp_ms'])
                newest_message: Dict[str, Any] = max(processed_messages, key=lambda x: x['timestamp_ms'])
                log(
                    json.dumps(
                        {
                            'num_messages': len(processed_messages),
                            'oldest': {
                                'timestamp_ms': oldest_message['timestamp_ms'],
                                'datetime': datetime.fromtimestamp(int(oldest_message['timestamp_ms']/1000),tz=timezone.utc).isoformat()
                            },
                            'latest': {
                                'timestamp_ms': newest_message['timestamp_ms'],
                                'datetime': datetime.fromtimestamp(int(newest_message['timestamp_ms']/1000),tz=timezone.utc).isoformat()
                            }
                        }, indent=2
                    ),
                    LogLevel.INFO
                )

        except Exception as e:
            log(f"Oops {str(e)} {str(sys.exc_info()[0])} {str(sys.exc_info()[1])} {traceback.format_exc()}", LogLevel.ERROR)
        finally:
            await asyncio.sleep(int(param['loop_freq_ms'] / 1000))
313
+
314
# Script entry point: run the polling loop until interrupted.
if __name__ == '__main__':
    try:
        asyncio.run(main())
    except KeyboardInterrupt:
        # Ctrl-C is a normal shutdown, not an error.
        log("Stopped by user", LogLevel.INFO)
    except Exception as exc:
        log(f"Unexpected error: {str(exc)}", LogLevel.ERROR)
@@ -254,7 +254,11 @@ async def main() -> None:
254
254
  processed_messages = sorted(processed_messages, key=lambda m: m['datetime'])
255
255
  last_message_date = processed_messages[-1]['datetime']
256
256
 
257
- redis_client: Optional[StrictRedis] = init_redis_client()
257
+ try:
258
+ redis_client: Optional[StrictRedis] = init_redis_client()
259
+ except Exception as redis_err:
260
+ redis_client = None
261
+ log(f"Failed to connect to redis. Still run but not publishing to it. {redis_err}")
258
262
 
259
263
  start_date: Optional[datetime] = None
260
264
  if param.get('start_date'):
@@ -362,7 +366,7 @@ async def main() -> None:
362
366
  processed_messages.append(message_data)
363
367
 
364
368
  if message_date>tm1:
365
- dispatch_notification(title=f"{param['current_filename']} {param['channel_username']} started", message=message_data, footer=param['notification']['footer'], params=notification_params, log_level=LogLevel.CRITICAL, logger=logger) # type: ignore
369
+ dispatch_notification(title=f"{param['current_filename']} {param['channel_username']} Incoming!", message=message_data, footer=param['notification']['footer'], params=notification_params, log_level=LogLevel.CRITICAL, logger=logger) # type: ignore
366
370
 
367
371
  with open(message_cache_file, 'a', encoding='utf-8') as f:
368
372
  json.dump(message_data, f, ensure_ascii=False)
@@ -47,7 +47,7 @@ class AnalyticUtilTests(unittest.TestCase):
47
47
  'datetime', 'datetime_utc', 'year', 'month', 'day', 'hour', 'minute', 'dayofweek',
48
48
  'pct_chg_on_close', 'candle_height', 'candle_body_height',
49
49
  'week_of_month', 'apac_trading_hr', 'emea_trading_hr', 'amer_trading_hr',
50
- 'is_green', 'pct_change_close',
50
+ 'is_green', 'candle_class', 'pct_change_close',
51
51
  'sma_short_periods', 'sma_long_periods', 'ema_short_periods', 'ema_long_periods', 'ema_close',
52
52
  'std', 'std_percent',
53
53
  'vwap_short_periods', 'vwap_long_periods',
@@ -15,6 +15,33 @@ from siglab_py.util.simple_math import bucket_series, bucketize_val
15
15
  from siglab_py.util.market_data_util import fix_column_types
16
16
  from siglab_py.constants import TrendDirection
17
17
 
18
def classify_candle(
    candle : pd.Series,
    min_candle_height_ratio : float = 3
) -> Union[str, None]:
    """Classify a single OHLC candle by its wick-to-body ratio.

    Args:
        candle: series with 'open', 'high', 'low', 'close' entries.
        min_candle_height_ratio: minimum full-height / |body-height| ratio
            for the candle to count as a long-wick pattern.

    Returns:
        'hammer', 'shooting_star', or None if no pattern matched.

    Fix vs original: local variables renamed so they no longer shadow the
    builtin ``open`` (and, for symmetry, other ambiguous names); behavior
    is unchanged.

    NOTE(review): because the hammer branch only requires close > low, any
    long-wick candle whose close sits above its low is labelled 'hammer'
    before the shooting-star branch is ever reached — confirm this ordering
    is intended.
    """
    candle_class : Union[str, None] = None
    open_px = candle['open']
    high_px = candle['high']
    low_px = candle['low']
    close_px = candle['close']
    candle_full_height = high_px - low_px       # always positive
    candle_body_height = close_px - open_px     # can be negative
    # Doji-style candles (zero body) get an infinite ratio.
    candle_height_ratio = candle_full_height / abs(candle_body_height) if candle_body_height!=0 else float('inf')

    if (
        candle_height_ratio>=min_candle_height_ratio
        and close_px>low_px
    ):
        candle_class = 'hammer'
    elif (
        candle_height_ratio>=min_candle_height_ratio
        and close_px<high_px
    ):
        candle_class = 'shooting_star'
    # Keep add more ...

    return candle_class
44
+
18
45
  # Fibonacci
19
46
  MAGIC_FIB_LEVELS = [0, 0.236, 0.382, 0.5, 0.618, 0.786, 1.00, 1.618, 2.618, 3.618, 4.236]
20
47
 
@@ -145,6 +172,8 @@ def compute_candles_stats(
145
172
 
146
173
  pd_candles['is_green'] = pd_candles['close'] >= pd_candles['open']
147
174
 
175
+ pd_candles['candle_class'] = pd_candles.apply(lambda row: classify_candle(row), axis=1)
176
+
148
177
  close_short_periods_rolling = pd_candles['close'].rolling(window=int(sliding_window_how_many_candles/slow_fast_interval_ratio))
149
178
  close_long_periods_rolling = pd_candles['close'].rolling(window=sliding_window_how_many_candles)
150
179
  close_short_periods_ewm = pd_candles['close'].ewm(span=int(sliding_window_how_many_candles/slow_fast_interval_ratio), adjust=False)
@@ -447,7 +476,7 @@ def compute_candles_stats(
447
476
  mitigated = pd_candles.iloc[idx + 1:row.name]['close'].lt(row['fvg_high']).any()
448
477
  return mitigated
449
478
 
450
- pd_candles['fvg_mitigated'] = pd_candles.apply(lambda row: compute_fvg_mitigated(row, pd_candles), axis=1)
479
+ pd_candles['fvg_mitigated'] = pd_candles.apply(lambda row: compute_fvg_mitigated(row, pd_candles), axis=1) # type: ignore
451
480
 
452
481
  '''
453
482
  RSI
@@ -83,7 +83,8 @@ async def async_instantiate_exchange(
83
83
  secret : str,
84
84
  passphrase : str,
85
85
  default_type : str,
86
- rate_limit_ms : float = 100
86
+ rate_limit_ms : float = 100,
87
+ verbose : bool = False
87
88
  ) -> Union[AnyExchange, None]:
88
89
  exchange : Union[AnyExchange, None] = None
89
90
  exchange_name : str = gateway_id.split('_')[0]
@@ -98,7 +99,8 @@ async def async_instantiate_exchange(
98
99
  'rateLimit' : rate_limit_ms,
99
100
  'options' : {
100
101
  'defaultType' : default_type
101
- }
102
+ },
103
+ 'verbose': verbose
102
104
  }
103
105
 
104
106
  if exchange_name=='binance':
@@ -156,7 +158,8 @@ async def async_instantiate_exchange(
156
158
  "walletAddress" : api_key,
157
159
  "privateKey" : secret,
158
160
  'enableRateLimit' : True,
159
- 'rateLimit' : rate_limit_ms
161
+ 'rateLimit' : rate_limit_ms,
162
+ 'verbose': verbose
160
163
  }
161
164
  ) # type: ignore
162
165
  else:
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: siglab_py
3
- Version: 0.5.72
3
+ Version: 0.5.78
4
4
  Summary: Market data fetches, TA calculations and generic order gateway.
5
5
  Author: r0bbarh00d
6
6
  Author-email: r0bbarh00d <r0bbarh00d@gmail.com>
@@ -17,6 +17,7 @@ siglab_py/market_data_providers/candles_ta_provider.py
17
17
  siglab_py/market_data_providers/ccxt_candles_ta_to_csv.py
18
18
  siglab_py/market_data_providers/deribit_options_expiry_provider.py
19
19
  siglab_py/market_data_providers/futu_candles_ta_to_csv.py
20
+ siglab_py/market_data_providers/google_monitor.py
20
21
  siglab_py/market_data_providers/orderbooks_provider.py
21
22
  siglab_py/market_data_providers/test_provider.py
22
23
  siglab_py/market_data_providers/tg_monitor.py