kuhl-haus-mdp 0.0.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- kuhl_haus/mdp/__init__.py +10 -0
- kuhl_haus/mdp/analyzers/__init__.py +0 -0
- kuhl_haus/mdp/analyzers/analyzer.py +15 -0
- kuhl_haus/mdp/analyzers/massive_data_analyzer.py +102 -0
- kuhl_haus/mdp/analyzers/top_stocks.py +408 -0
- kuhl_haus/mdp/components/__init__.py +0 -0
- kuhl_haus/mdp/components/market_data_cache.py +29 -0
- kuhl_haus/mdp/components/market_data_scanner.py +236 -0
- kuhl_haus/mdp/components/widget_data_service.py +191 -0
- kuhl_haus/mdp/helpers/__init__.py +0 -0
- kuhl_haus/mdp/helpers/process_manager.py +228 -0
- kuhl_haus/mdp/helpers/queue_name_resolver.py +24 -0
- kuhl_haus/mdp/integ/__init__.py +0 -0
- kuhl_haus/mdp/integ/massive_data_listener.py +140 -0
- kuhl_haus/mdp/integ/massive_data_processor.py +236 -0
- kuhl_haus/mdp/integ/massive_data_queues.py +124 -0
- kuhl_haus/mdp/integ/utils.py +27 -0
- kuhl_haus/mdp/integ/web_socket_message_serde.py +143 -0
- kuhl_haus/mdp/models/__init__.py +0 -0
- kuhl_haus/mdp/models/market_data_analyzer_result.py +16 -0
- kuhl_haus/mdp/models/market_data_cache_keys.py +28 -0
- kuhl_haus/mdp/models/market_data_pubsub_keys.py +27 -0
- kuhl_haus/mdp/models/market_data_scanner_names.py +10 -0
- kuhl_haus/mdp/models/massive_data_queue.py +10 -0
- kuhl_haus_mdp-0.0.1.dist-info/METADATA +79 -0
- kuhl_haus_mdp-0.0.1.dist-info/RECORD +29 -0
- kuhl_haus_mdp-0.0.1.dist-info/WHEEL +4 -0
- kuhl_haus_mdp-0.0.1.dist-info/entry_points.txt +4 -0
- kuhl_haus_mdp-0.0.1.dist-info/licenses/LICENSE.txt +21 -0

kuhl_haus/mdp/__init__.py
@@ -0,0 +1,10 @@
+from importlib_metadata import PackageNotFoundError, version  # pragma: no cover
+
+try:
+    # Change here if project is renamed and does not equal the package name
+    dist_name = __name__
+    __version__ = version(dist_name)
+except PackageNotFoundError:  # pragma: no cover
+    __version__ = "unknown"
+finally:
+    del version, PackageNotFoundError
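
As an aside, a minimal sketch of what this shim yields at import time, assuming the wheel is installed (importlib metadata normalizes the module name kuhl_haus.mdp to the distribution name kuhl-haus-mdp):

    # Hypothetical usage sketch; not part of the packaged diff.
    import kuhl_haus.mdp

    # Resolved via importlib_metadata when the distribution is installed,
    # "unknown" otherwise (e.g. running from a bare source checkout).
    print(kuhl_haus.mdp.__version__)  # e.g. "0.0.1"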

kuhl_haus/mdp/analyzers/__init__.py
File without changes

kuhl_haus/mdp/analyzers/analyzer.py
@@ -0,0 +1,15 @@
+from typing import Optional, List
+from kuhl_haus.mdp.models.market_data_analyzer_result import MarketDataAnalyzerResult
+
+
+class Analyzer:
+    cache_key: str
+
+    def __init__(self, cache_key: str, **kwargs):
+        self.cache_key = cache_key
+
+    async def rehydrate(self, data: dict):
+        pass
+
+    async def analyze_data(self, data: dict) -> Optional[List[MarketDataAnalyzerResult]]:
+        pass
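
Analyzer is effectively an abstract base: concrete analyzers override rehydrate and analyze_data and pass a cache_key up through **kwargs. A minimal subclass sketch, with the key name purely illustrative:

    # Hypothetical subclass sketch; "scanner:noop" is an invented key.
    class NoOpAnalyzer(Analyzer):
        def __init__(self, **kwargs):
            kwargs.setdefault("cache_key", "scanner:noop")
            super().__init__(**kwargs)

        async def rehydrate(self, data: dict):
            pass  # nothing to restore from cache

        async def analyze_data(self, data: dict):
            return None  # discard every message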

kuhl_haus/mdp/analyzers/massive_data_analyzer.py
@@ -0,0 +1,102 @@
+import logging
+from time import time
+from typing import List, Optional
+from massive.websocket.models import EventType
+
+from kuhl_haus.mdp.models.market_data_analyzer_result import MarketDataAnalyzerResult
+from kuhl_haus.mdp.models.market_data_cache_keys import MarketDataCacheKeys
+
+
+class MassiveDataAnalyzer:
+    def __init__(self):
+        self.logger = logging.getLogger(__name__)
+        self.event_handlers = {
+            EventType.LimitUpLimitDown.value: self.handle_luld_event,
+            EventType.EquityAgg.value: self.handle_equity_agg_event,
+            EventType.EquityAggMin.value: self.handle_equity_agg_event,
+            EventType.EquityTrade.value: self.handle_equity_trade_event,
+            EventType.EquityQuote.value: self.handle_equity_quote_event,
+        }
+
+    async def analyze_data(self, data: dict) -> Optional[List[MarketDataAnalyzerResult]]:
+        """
+        Process a raw market data message.
+
+        Args:
+            data: serialized message from Massive/Polygon.io
+
+        Returns:
+            A list of analyzer results, or None if the message should be discarded
+        """
+        if "event_type" not in data:
+            self.logger.info("Message missing 'event_type'")
+            return await self.handle_unknown_event(data)
+        event_type = data.get("event_type")
+
+        if "symbol" not in data:
+            self.logger.info("Message missing 'symbol'")
+            return await self.handle_unknown_event(data)
+        symbol = data.get("symbol")
+
+        if event_type in self.event_handlers:
+            return await self.event_handlers[event_type](data=data, symbol=symbol)
+        else:
+            self.logger.warning(f"Unsupported message type: {event_type}")
+            return await self.handle_unknown_event(data)
+
+    async def handle_luld_event(self, data: dict, symbol: str) -> Optional[List[MarketDataAnalyzerResult]]:
+        try:
+            return [MarketDataAnalyzerResult(
+                data=data,
+                cache_key=f"{MarketDataCacheKeys.HALTS.value}:{symbol}",
+                cache_ttl=28500,  # 7 hours, 55 minutes
+                publish_key=f"{MarketDataCacheKeys.HALTS.value}:{symbol}",
+            )]
+        except Exception as e:
+            self.logger.error(f"Error processing LULD message for {symbol}: {data}", exc_info=e)
+
+    async def handle_equity_agg_event(self, data: dict, symbol: str) -> Optional[List[MarketDataAnalyzerResult]]:
+        try:
+            return [MarketDataAnalyzerResult(
+                data=data,
+                # cache_key=f"{MarketDataCacheKeys.AGGREGATE.value}:{symbol}",
+                # cache_ttl=259200,  # 3 days
+                publish_key=f"{MarketDataCacheKeys.AGGREGATE.value}:{symbol}",
+            )]
+        except Exception as e:
+            self.logger.error(f"Error processing EquityAgg message for {symbol}: {data}", exc_info=e)
+
+    async def handle_equity_trade_event(self, data: dict, symbol: str) -> Optional[List[MarketDataAnalyzerResult]]:
+        try:
+            return [MarketDataAnalyzerResult(
+                data=data,
+                # cache_key=f"{MarketDataCacheKeys.TRADES.value}:{symbol}",
+                # cache_ttl=28500,  # 7 hours, 55 minutes
+                publish_key=f"{MarketDataCacheKeys.TRADES.value}:{symbol}",
+            )]
+        except Exception as e:
+            self.logger.error(f"Error processing EquityTrade message for {symbol}: {data}", exc_info=e)
+
+    async def handle_equity_quote_event(self, data: dict, symbol: str) -> Optional[List[MarketDataAnalyzerResult]]:
+        try:
+            return [MarketDataAnalyzerResult(
+                data=data,
+                # cache_key=f"{MarketDataCacheKeys.QUOTES.value}:{symbol}",
+                # cache_ttl=259200,  # 3 days
+                publish_key=f"{MarketDataCacheKeys.QUOTES.value}:{symbol}",
+            )]
+        except Exception as e:
+            self.logger.error(f"Error processing EquityQuote message for {symbol}: {data}", exc_info=e)
+
+    async def handle_unknown_event(self, data: dict) -> Optional[List[MarketDataAnalyzerResult]]:
+        try:
+            timestamp = f"{time()}".replace('.', '')
+            cache_key = f"{MarketDataCacheKeys.UNKNOWN.value}:{timestamp}"
+            return [MarketDataAnalyzerResult(
+                data=data,
+                cache_key=cache_key,
+                cache_ttl=86400,  # 1 day
+                publish_key=f"{MarketDataCacheKeys.UNKNOWN.value}",
+            )]
+        except Exception as e:
+            self.logger.error(f"Error processing unknown message type: {data}", exc_info=e)
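
A sketch of driving the dispatcher above with one raw message; it assumes only what the code shows, namely that EventType members expose string .value keys matching the feed's event_type field, and that MarketDataAnalyzerResult accepts these keyword arguments:

    # Hypothetical driver sketch.
    import asyncio
    from massive.websocket.models import EventType

    analyzer = MassiveDataAnalyzer()
    message = {"event_type": EventType.EquityTrade.value, "symbol": "AAPL"}
    results = asyncio.run(analyzer.analyze_data(message))
    # -> a single MarketDataAnalyzerResult carrying publish_key
    #    "<TRADES key>:AAPL" and no cache_key (caching is commented out)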

kuhl_haus/mdp/analyzers/top_stocks.py
@@ -0,0 +1,408 @@
+import logging
+import time
+from collections import defaultdict
+from dataclasses import dataclass, field
+from datetime import datetime, timezone, timedelta
+from typing import Dict, Optional, List, Iterator
+from zoneinfo import ZoneInfo
+
+from massive.rest import RESTClient
+from massive.rest.models import (
+    TickerSnapshot,
+    Agg,
+)
+from massive.websocket.models import (
+    EquityTrade,
+    EquityAgg,
+    EventType
+)
+from massive.exceptions import BadResponse
+
+from kuhl_haus.mdp.analyzers.analyzer import Analyzer
+from kuhl_haus.mdp.models.market_data_analyzer_result import MarketDataAnalyzerResult
+from kuhl_haus.mdp.models.market_data_cache_keys import MarketDataCacheKeys
+from kuhl_haus.mdp.models.market_data_pubsub_keys import MarketDataPubSubKeys
+
+
+# docs
+# https://massive.com/docs/stocks/ws_stocks_am
+# https://massive.com/docs/websocket/stocks/trades
+
+@dataclass
+class TopStocksCacheItem:
+    day_start_time: Optional[float] = 0.0
+
+    # Cached details for each ticker
+    symbol_data_cache: Optional[Dict[str, dict]] = field(default_factory=lambda: defaultdict(dict))
+
+    # Top Volume map
+    top_volume_map: Optional[Dict[str, float]] = field(default_factory=lambda: defaultdict(dict))
+
+    # Top Gappers map
+    top_gappers_map: Optional[Dict[str, float]] = field(default_factory=lambda: defaultdict(dict))
+
+    # Top Gainers map
+    top_gainers_map: Optional[Dict[str, float]] = field(default_factory=lambda: defaultdict(dict))
+
+    def to_dict(self):
+        ret = {
+            # Cache start time
+            "day_start_time": self.day_start_time,
+
+            # Maps
+            "symbol_data_cache": self.symbol_data_cache,
+            "top_volume_map": self.top_volume_map,
+            "top_gappers_map": self.top_gappers_map,
+            "top_gainers_map": self.top_gainers_map,
+        }
+        return ret
+
+    def top_volume(self, limit):
+        ret = []
+        for ticker, volume in sorted(self.top_volume_map.items(), key=lambda x: x[1], reverse=True)[
+            :limit
+        ]:
+            try:
+                ret.append({
+                    "symbol": ticker,
+                    "volume": self.symbol_data_cache[ticker]["volume"],
+                    "accumulated_volume": self.symbol_data_cache[ticker]["accumulated_volume"],
+                    "relative_volume": self.symbol_data_cache[ticker]["relative_volume"],
+                    "official_open_price": self.symbol_data_cache[ticker]["official_open_price"],
+                    "vwap": self.symbol_data_cache[ticker]["vwap"],
+                    "open": self.symbol_data_cache[ticker]["open"],
+                    "close": self.symbol_data_cache[ticker]["close"],
+                    "high": self.symbol_data_cache[ticker]["high"],
+                    "low": self.symbol_data_cache[ticker]["low"],
+                    "aggregate_vwap": self.symbol_data_cache[ticker]["aggregate_vwap"],
+                    "average_size": self.symbol_data_cache[ticker]["average_size"],
+                    "avg_volume": self.symbol_data_cache[ticker]["avg_volume"],
+                    "prev_day_close": self.symbol_data_cache[ticker]["prev_day_close"],
+                    "prev_day_volume": self.symbol_data_cache[ticker]["prev_day_volume"],
+                    "prev_day_vwap": self.symbol_data_cache[ticker]["prev_day_vwap"],
+                    "change": self.symbol_data_cache[ticker]["change"],
+                    "pct_change": self.symbol_data_cache[ticker]["pct_change"],
+                    "change_since_open": self.symbol_data_cache[ticker]["change_since_open"],
+                    "pct_change_since_open": self.symbol_data_cache[ticker]["pct_change_since_open"],
+                    "start_timestamp": self.symbol_data_cache[ticker]["start_timestamp"],
+                    "end_timestamp": self.symbol_data_cache[ticker]["end_timestamp"],
+                })
+            except KeyError:
+                del self.top_volume_map[ticker]
+        return ret
+
+    def top_gappers(self, limit):
+        ret = []
+        for ticker, pct_change in sorted(self.top_gappers_map.items(), key=lambda x: x[1], reverse=True)[
+            :limit
+        ]:
+            try:
+                if pct_change <= 0:
+                    break
+                ret.append({
+                    "symbol": ticker,
+                    "volume": self.symbol_data_cache[ticker]["volume"],
+                    "accumulated_volume": self.symbol_data_cache[ticker]["accumulated_volume"],
+                    "relative_volume": self.symbol_data_cache[ticker]["relative_volume"],
+                    "official_open_price": self.symbol_data_cache[ticker]["official_open_price"],
+                    "vwap": self.symbol_data_cache[ticker]["vwap"],
+                    "open": self.symbol_data_cache[ticker]["open"],
+                    "close": self.symbol_data_cache[ticker]["close"],
+                    "high": self.symbol_data_cache[ticker]["high"],
+                    "low": self.symbol_data_cache[ticker]["low"],
+                    "aggregate_vwap": self.symbol_data_cache[ticker]["aggregate_vwap"],
+                    "average_size": self.symbol_data_cache[ticker]["average_size"],
+                    "avg_volume": self.symbol_data_cache[ticker]["avg_volume"],
+                    "prev_day_close": self.symbol_data_cache[ticker]["prev_day_close"],
+                    "prev_day_volume": self.symbol_data_cache[ticker]["prev_day_volume"],
+                    "prev_day_vwap": self.symbol_data_cache[ticker]["prev_day_vwap"],
+                    "change": self.symbol_data_cache[ticker]["change"],
+                    "pct_change": self.symbol_data_cache[ticker]["pct_change"],
+                    "change_since_open": self.symbol_data_cache[ticker]["change_since_open"],
+                    "pct_change_since_open": self.symbol_data_cache[ticker]["pct_change_since_open"],
+                    "start_timestamp": self.symbol_data_cache[ticker]["start_timestamp"],
+                    "end_timestamp": self.symbol_data_cache[ticker]["end_timestamp"],
+                })
+            except KeyError:
+                del self.top_gappers_map[ticker]
+        return ret
+
+    def top_gainers(self, limit):
+        ret = []
+        for ticker, pct_change in sorted(self.top_gainers_map.items(), key=lambda x: x[1], reverse=True)[
+            :limit
+        ]:
+            try:
+                if pct_change <= 0:
+                    break
+                ret.append({
+                    "symbol": ticker,
+                    "volume": self.symbol_data_cache[ticker]["volume"],
+                    "accumulated_volume": self.symbol_data_cache[ticker]["accumulated_volume"],
+                    "relative_volume": self.symbol_data_cache[ticker]["relative_volume"],
+                    "official_open_price": self.symbol_data_cache[ticker]["official_open_price"],
+                    "vwap": self.symbol_data_cache[ticker]["vwap"],
+                    "open": self.symbol_data_cache[ticker]["open"],
+                    "close": self.symbol_data_cache[ticker]["close"],
+                    "high": self.symbol_data_cache[ticker]["high"],
+                    "low": self.symbol_data_cache[ticker]["low"],
+                    "aggregate_vwap": self.symbol_data_cache[ticker]["aggregate_vwap"],
+                    "average_size": self.symbol_data_cache[ticker]["average_size"],
+                    "avg_volume": self.symbol_data_cache[ticker]["avg_volume"],
+                    "prev_day_close": self.symbol_data_cache[ticker]["prev_day_close"],
+                    "prev_day_volume": self.symbol_data_cache[ticker]["prev_day_volume"],
+                    "prev_day_vwap": self.symbol_data_cache[ticker]["prev_day_vwap"],
+                    "change": self.symbol_data_cache[ticker]["change"],
+                    "pct_change": self.symbol_data_cache[ticker]["pct_change"],
+                    "change_since_open": self.symbol_data_cache[ticker]["change_since_open"],
+                    "pct_change_since_open": self.symbol_data_cache[ticker]["pct_change_since_open"],
+                    "start_timestamp": self.symbol_data_cache[ticker]["start_timestamp"],
+                    "end_timestamp": self.symbol_data_cache[ticker]["end_timestamp"],
+                })
+            except KeyError:
+                del self.top_gainers_map[ticker]
+        return ret
+
+
+class TopStocksAnalyzer(Analyzer):
+
+    def __init__(self, rest_client: RESTClient, **kwargs):
+        if "cache_key" not in kwargs:
+            kwargs["cache_key"] = MarketDataCacheKeys.TOP_STOCKS_SCANNER.value
+        super().__init__(**kwargs)
+        self.rest_client = rest_client
+        self.logger = logging.getLogger(__name__)
+        self.cache_item = TopStocksCacheItem()
+        self.last_update_time = 0
+        self.pre_market_reset = False
+
+    async def rehydrate(self, data: dict):
+        if not data:
+            self.cache_item = TopStocksCacheItem()
+            self.logger.info("No data to rehydrate TopStocksCacheItem.")
+            return
+
+        # Get current time in UTC, then convert to Eastern Time
+        utc_now = datetime.now(timezone.utc)
+        et_now = utc_now.astimezone(ZoneInfo("America/New_York"))
+
+        # Check if within trading hours: Mon-Fri, 04:00-19:59 ET
+        is_weekday = et_now.weekday() < 5
+        is_trading_hours = 4 <= et_now.hour < 20
+        if not is_weekday or not is_trading_hours:
+            self.cache_item = TopStocksCacheItem()
+            self.logger.info(f"Outside market hours ({et_now.strftime('%H:%M:%S %Z')}), clearing cache.")
+            return
+        self.cache_item = TopStocksCacheItem(**data)
+        self.logger.info("Rehydrated TopStocksCacheItem")
+
+    async def analyze_data(self, data: dict) -> Optional[List[MarketDataAnalyzerResult]]:
+        utc_now = datetime.now(timezone.utc)
+        et_now = utc_now.astimezone(ZoneInfo("America/New_York"))
+        current_day = et_now.replace(hour=4, minute=0, second=0, microsecond=0).timestamp()
+        if current_day != self.cache_item.day_start_time:
+            self.logger.info(f"New day: {current_day} - resetting cache.")
+            self.cache_item = TopStocksCacheItem()
+            self.cache_item.day_start_time = current_day
+        elif et_now.hour == 9 and et_now.minute == 30 and not self.pre_market_reset:
+            self.logger.info("Market is now open; resetting symbol data cache.")
+            self.cache_item.symbol_data_cache = {}
+            self.pre_market_reset = True
+
+        event_type = data.get("event_type")
+        symbol = data.get("symbol")
+        if not event_type:
+            self.logger.info(f"Discarding data: {data}")
+            return None
+        elif not symbol:
+            self.logger.info(f"Discarding data: {data}")
+            return None
+        elif event_type == EventType.EquityAgg.value:
+            self.logger.debug(f"Processing EquityAgg: {data.get('symbol')}")
+            await self.handle_equity_agg(EquityAgg(**data))
+        elif event_type == EventType.EquityAggMin.value:
+            self.logger.debug(f"Processing EquityAggMin: {data.get('symbol')}")
+            await self.handle_equity_agg(EquityAgg(**data))
+        else:
+            self.logger.info(f"Discarding data: {data}")
+            return None
+        current_time = int(time.time())
+        # Emit results at most once per second
+        if current_time <= self.last_update_time:
+            return None
+        self.last_update_time = current_time
+
+        result = [
+            # MarketDataAnalyzerResult(
+            #     data=data,
+            #     cache_key=f"{MarketDataCacheKeys.AGGREGATE.value}:{symbol}",
+            #     cache_ttl=86400,  # 1 day
+            #     # publish_key=f"{MarketDataCacheKeys.AGGREGATE.value}:{symbol}",
+            # ),
+            MarketDataAnalyzerResult(
+                data=self.cache_item.to_dict(),
+                cache_key=self.cache_key,
+                cache_ttl=28500,  # 7 hours, 55 minutes
+            ),
+            MarketDataAnalyzerResult(
+                data=self.cache_item.top_volume(100),
+                cache_key=MarketDataPubSubKeys.TOP_VOLUME_SCANNER.value,
+                cache_ttl=259200,  # 3 days
+                publish_key=MarketDataPubSubKeys.TOP_VOLUME_SCANNER.value,
+            ),
+            MarketDataAnalyzerResult(
+                data=self.cache_item.top_gainers(500),
+                cache_key=MarketDataPubSubKeys.TOP_GAINERS_SCANNER.value,
+                cache_ttl=259200,  # 3 days
+                publish_key=MarketDataPubSubKeys.TOP_GAINERS_SCANNER.value,
+            ),
+            MarketDataAnalyzerResult(
+                data=self.cache_item.top_gappers(500),
+                cache_key=MarketDataPubSubKeys.TOP_GAPPERS_SCANNER.value,
+                cache_ttl=259200,  # 3 days
+                publish_key=MarketDataPubSubKeys.TOP_GAPPERS_SCANNER.value,
+            )
+        ]
+
+        return result
+
+    async def handle_equity_agg(self, event: EquityAgg):
+        # Get data from symbol data cache or Rest API
+        if event.symbol in self.cache_item.symbol_data_cache:
+            cached_data = self.cache_item.symbol_data_cache[event.symbol]
+            avg_volume = cached_data["avg_volume"]
+            prev_day_close = cached_data["prev_day_close"]
+            prev_day_volume = cached_data["prev_day_volume"]
+            prev_day_vwap = cached_data["prev_day_vwap"]
+        else:
+            # Get snapshot for previous day's data
+            retry_count = 0
+            max_tries = 3
+            prev_day_close = 0
+            prev_day_volume = 0
+            prev_day_vwap = 0
+            while retry_count < max_tries:
+                try:
+                    snapshot = await self.get_ticker_snapshot(event.symbol)
+                    prev_day_close = snapshot.prev_day.close
+                    prev_day_volume = snapshot.prev_day.volume
+                    prev_day_vwap = snapshot.prev_day.vwap
+                    break
+                except BadResponse as e:
+                    self.logger.error(f"Error getting snapshot for {event.symbol}: {repr(e)}", exc_info=e, stack_info=True)
+                    retry_count += 1
+            if retry_count == max_tries and prev_day_close == 0:
+                self.logger.error(f"Failed to get snapshot for {event.symbol} after {max_tries} tries.")
+                return
+
+            # Get average volume
+            retry_count = 0
+            max_tries = 3
+            avg_volume = 0
+            while retry_count < max_tries:
+                try:
+                    avg_volume = await self.get_avg_volume(event.symbol)
+                    break
+                except (BadResponse, ZeroDivisionError) as e:
+                    self.logger.error(f"Error getting average volume for {event.symbol}: {repr(e)}", exc_info=e, stack_info=True)
+                    retry_count += 1
+            if retry_count == max_tries and avg_volume == 0:
+                self.logger.error(f"Failed to get average volume for {event.symbol} after {max_tries} tries.")
+                return
+
+        # Calculate relative volume
+        if avg_volume == 0:
+            relative_volume = 0
+        else:
+            relative_volume = event.accumulated_volume / avg_volume
+
+        # Calculate percentage change since previous close
+        if prev_day_close == 0:
+            change = 0
+            pct_change = 0
+        else:
+            change = event.close - prev_day_close
+            pct_change = change / prev_day_close * 100
+
+        # Calculate percentage change since opening bell
+        change_since_open = 0
+        pct_change_since_open = 0
+        if event.official_open_price:
+            change_since_open = event.close - event.official_open_price
+            pct_change_since_open = change_since_open / event.official_open_price * 100
+
+        # Rank top tickers by accumulated volume
+        self.cache_item.top_volume_map[event.symbol] = event.accumulated_volume
+
+        # Rank top gappers by percentage gain since the previous day's close
+        self.cache_item.top_gappers_map[event.symbol] = pct_change
+
+        # Rank top gainers by percentage gain since the opening bell
+        self.cache_item.top_gainers_map[event.symbol] = pct_change_since_open
+
+        # Update symbol data cache
+        self.cache_item.symbol_data_cache[event.symbol] = {
+            "symbol": event.symbol,
+            "volume": event.volume,
+            "accumulated_volume": event.accumulated_volume,
+            "relative_volume": relative_volume,
+            "official_open_price": event.official_open_price,
+            "vwap": event.vwap,
+            "open": event.open,
+            "close": event.close,
+            "high": event.high,
+            "low": event.low,
+            "aggregate_vwap": event.aggregate_vwap,
+            "average_size": event.average_size,
+            "avg_volume": avg_volume,
+            "prev_day_close": prev_day_close,
+            "prev_day_volume": prev_day_volume,
+            "prev_day_vwap": prev_day_vwap,
+            "change": change,
+            "pct_change": pct_change,
+            "change_since_open": change_since_open,
+            "pct_change_since_open": pct_change_since_open,
+            "start_timestamp": event.start_timestamp,
+            "end_timestamp": event.end_timestamp,
+        }
+
+    async def get_ticker_snapshot(self, ticker: str) -> TickerSnapshot:
+        self.logger.debug(f"Getting snapshot for {ticker}")
+        result: TickerSnapshot = self.rest_client.get_snapshot_ticker(
+            market_type="stocks",
+            ticker=ticker
+        )
+        self.logger.debug(f"Snapshot result: {result}")
+        return result
+
+    async def get_avg_volume(self, ticker: str):
+        self.logger.debug(f"Getting average volume for {ticker}")
+        # Get today's date string in YYYY-MM-DD format
+        end_date = datetime.now(timezone.utc).strftime("%Y-%m-%d")
+        # Go back 42 calendar days, enough to cover roughly 30 trading sessions
+        start_date = (datetime.now(timezone.utc) - timedelta(days=42)).strftime("%Y-%m-%d")
+
+        result: Iterator[Agg] = self.rest_client.list_aggs(
+            ticker=ticker,
+            multiplier=1,
+            timespan="day",
+            from_=start_date,
+            to=end_date,
+            adjusted=True,
+            sort="desc"
+        )
+        self.logger.debug(f"average volume result: {result}")
+
+        total_volume = 0
+        max_periods = 30
+        periods_calculated = 0
+        for agg in result:
+            if periods_calculated < max_periods:
+                total_volume += agg.volume
+                periods_calculated += 1
+            else:
+                break
+        avg_volume = total_volume / periods_calculated
+
+        self.logger.debug(f"average volume {ticker}: {avg_volume}")
+        return avg_volume
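
The derived per-symbol metrics above reduce to three ratios; a worked example with illustrative numbers:

    # Worked example; the numbers are invented for illustration.
    accumulated_volume = 3_000_000
    avg_volume = 1_500_000            # ~30-session average volume
    prev_day_close = 10.00
    official_open_price = 10.50
    close = 11.00

    relative_volume = accumulated_volume / avg_volume             # 2.0x
    pct_change = (close - prev_day_close) / prev_day_close * 100  # +10.0% -> top gappers
    pct_change_since_open = (close - official_open_price) / official_open_price * 100  # ~+4.76% -> top gainers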

kuhl_haus/mdp/components/__init__.py
File without changes

kuhl_haus/mdp/components/market_data_cache.py
@@ -0,0 +1,29 @@
+import json
+import logging
+from typing import Any, Optional
+
+import redis.asyncio as aioredis
+
+
+class MarketDataCache:
+    def __init__(self, redis_client: aioredis.Redis):
+        self.logger = logging.getLogger(__name__)
+        self.redis_client = redis_client
+
+    async def get_cache(self, cache_key: str) -> Optional[dict]:
+        """Fetch the current value from the Redis cache (for snapshot requests)."""
+        value = await self.redis_client.get(cache_key)
+        if value:
+            return json.loads(value)
+        return None
+
+    async def cache_data(self, data: Any, cache_key: str, cache_ttl: int = 0):
+        if cache_ttl > 0:
+            await self.redis_client.setex(cache_key, cache_ttl, json.dumps(data))
+        else:
+            await self.redis_client.set(cache_key, json.dumps(data))
+        self.logger.debug(f"Cached data for {cache_key}")
+
+    async def publish_data(self, data: Any, publish_key: Optional[str] = None):
+        await self.redis_client.publish(publish_key, json.dumps(data))
+        self.logger.debug(f"Published data for {publish_key}")
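
A minimal sketch wiring MarketDataCache to a local Redis instance; the URL and key are placeholders:

    # Hypothetical usage sketch; assumes redis-py >= 5 for aclose().
    import asyncio
    import redis.asyncio as aioredis

    async def main():
        client = aioredis.from_url("redis://localhost:6379")
        cache = MarketDataCache(redis_client=client)
        await cache.cache_data({"close": 11.0}, cache_key="demo:key", cache_ttl=60)
        print(await cache.get_cache("demo:key"))  # {'close': 11.0}
        await client.aclose()

    asyncio.run(main())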