kuhl-haus-mdp 0.1.1__tar.gz → 0.1.2__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
Files changed (40)
  1. {kuhl_haus_mdp-0.1.1 → kuhl_haus_mdp-0.1.2}/PKG-INFO +1 -1
  2. {kuhl_haus_mdp-0.1.1 → kuhl_haus_mdp-0.1.2}/pyproject.toml +1 -1
  3. kuhl_haus_mdp-0.1.2/src/kuhl_haus/mdp/analyzers/massive_data_analyzer.py +93 -0
  4. {kuhl_haus_mdp-0.1.1 → kuhl_haus_mdp-0.1.2}/src/kuhl_haus/mdp/integ/massive_data_processor.py +2 -2
  5. kuhl_haus_mdp-0.1.2/src/kuhl_haus/mdp/models/market_data_cache_ttl.py +19 -0
  6. kuhl_haus_mdp-0.1.2/tests/integ/__init__.py +0 -0
  7. kuhl_haus_mdp-0.1.2/tests/test_massive_data_analyzer.py +233 -0
  8. kuhl_haus_mdp-0.1.1/src/kuhl_haus/mdp/analyzers/massive_data_analyzer.py +0 -102
  9. {kuhl_haus_mdp-0.1.1 → kuhl_haus_mdp-0.1.2}/LICENSE.txt +0 -0
  10. {kuhl_haus_mdp-0.1.1 → kuhl_haus_mdp-0.1.2}/README.md +0 -0
  11. {kuhl_haus_mdp-0.1.1 → kuhl_haus_mdp-0.1.2}/src/kuhl_haus/mdp/__init__.py +0 -0
  12. {kuhl_haus_mdp-0.1.1 → kuhl_haus_mdp-0.1.2}/src/kuhl_haus/mdp/analyzers/__init__.py +0 -0
  13. {kuhl_haus_mdp-0.1.1 → kuhl_haus_mdp-0.1.2}/src/kuhl_haus/mdp/analyzers/analyzer.py +0 -0
  14. {kuhl_haus_mdp-0.1.1 → kuhl_haus_mdp-0.1.2}/src/kuhl_haus/mdp/analyzers/top_stocks.py +0 -0
  15. {kuhl_haus_mdp-0.1.1 → kuhl_haus_mdp-0.1.2}/src/kuhl_haus/mdp/components/__init__.py +0 -0
  16. {kuhl_haus_mdp-0.1.1 → kuhl_haus_mdp-0.1.2}/src/kuhl_haus/mdp/components/market_data_cache.py +0 -0
  17. {kuhl_haus_mdp-0.1.1 → kuhl_haus_mdp-0.1.2}/src/kuhl_haus/mdp/components/market_data_scanner.py +0 -0
  18. {kuhl_haus_mdp-0.1.1 → kuhl_haus_mdp-0.1.2}/src/kuhl_haus/mdp/components/widget_data_service.py +0 -0
  19. {kuhl_haus_mdp-0.1.1 → kuhl_haus_mdp-0.1.2}/src/kuhl_haus/mdp/helpers/__init__.py +0 -0
  20. {kuhl_haus_mdp-0.1.1 → kuhl_haus_mdp-0.1.2}/src/kuhl_haus/mdp/helpers/process_manager.py +0 -0
  21. {kuhl_haus_mdp-0.1.1 → kuhl_haus_mdp-0.1.2}/src/kuhl_haus/mdp/helpers/queue_name_resolver.py +0 -0
  22. {kuhl_haus_mdp-0.1.1 → kuhl_haus_mdp-0.1.2}/src/kuhl_haus/mdp/integ/__init__.py +0 -0
  23. {kuhl_haus_mdp-0.1.1 → kuhl_haus_mdp-0.1.2}/src/kuhl_haus/mdp/integ/massive_data_listener.py +0 -0
  24. {kuhl_haus_mdp-0.1.1 → kuhl_haus_mdp-0.1.2}/src/kuhl_haus/mdp/integ/massive_data_queues.py +0 -0
  25. {kuhl_haus_mdp-0.1.1 → kuhl_haus_mdp-0.1.2}/src/kuhl_haus/mdp/integ/utils.py +0 -0
  26. {kuhl_haus_mdp-0.1.1 → kuhl_haus_mdp-0.1.2}/src/kuhl_haus/mdp/integ/web_socket_message_serde.py +0 -0
  27. {kuhl_haus_mdp-0.1.1 → kuhl_haus_mdp-0.1.2}/src/kuhl_haus/mdp/models/__init__.py +0 -0
  28. {kuhl_haus_mdp-0.1.1 → kuhl_haus_mdp-0.1.2}/src/kuhl_haus/mdp/models/market_data_analyzer_result.py +0 -0
  29. {kuhl_haus_mdp-0.1.1 → kuhl_haus_mdp-0.1.2}/src/kuhl_haus/mdp/models/market_data_cache_keys.py +0 -0
  30. {kuhl_haus_mdp-0.1.1 → kuhl_haus_mdp-0.1.2}/src/kuhl_haus/mdp/models/market_data_pubsub_keys.py +0 -0
  31. {kuhl_haus_mdp-0.1.1 → kuhl_haus_mdp-0.1.2}/src/kuhl_haus/mdp/models/market_data_scanner_names.py +0 -0
  32. {kuhl_haus_mdp-0.1.1 → kuhl_haus_mdp-0.1.2}/src/kuhl_haus/mdp/models/massive_data_queue.py +0 -0
  33. {kuhl_haus_mdp-0.1.1 → kuhl_haus_mdp-0.1.2}/tests/__init__.py +0 -0
  34. {kuhl_haus_mdp-0.1.1 → kuhl_haus_mdp-0.1.2}/tests/components/__init__.py +0 -0
  35. {kuhl_haus_mdp-0.1.1 → kuhl_haus_mdp-0.1.2}/tests/components/test_market_data_scanner.py +0 -0
  36. {kuhl_haus_mdp-0.1.1 → kuhl_haus_mdp-0.1.2}/tests/components/test_widget_data_service.py +0 -0
  37. {kuhl_haus_mdp-0.1.1 → kuhl_haus_mdp-0.1.2}/tests/helpers/__init__.py +0 -0
  38. {kuhl_haus_mdp-0.1.1 → kuhl_haus_mdp-0.1.2}/tests/helpers/test_process_manager.py +0 -0
  39. {kuhl_haus_mdp-0.1.1/tests → kuhl_haus_mdp-0.1.2/tests/helpers}/test_queue_name_resolver.py +0 -0
  40. {kuhl_haus_mdp-0.1.1/tests → kuhl_haus_mdp-0.1.2/tests/integ}/test_web_socket_message_serde.py +0 -0
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: kuhl-haus-mdp
- Version: 0.1.1
+ Version: 0.1.2
  Summary: Market data processing pipeline for stock market scanner
  Author-Email: Tom Pounders <git@oldschool.engineer>
  License: The MIT License (MIT)
@@ -29,7 +29,7 @@ dependencies = [
      "python-dotenv",
      "massive",
  ]
- version = "0.1.1"
+ version = "0.1.2"

  [project.license]
  file = "LICENSE.txt"
@@ -0,0 +1,93 @@
+ import logging
+ from time import time
+ from typing import List, Optional
+ from massive.websocket.models import EventType
+
+ from kuhl_haus.mdp.models.market_data_analyzer_result import MarketDataAnalyzerResult
+ from kuhl_haus.mdp.models.market_data_cache_keys import MarketDataCacheKeys
+ from kuhl_haus.mdp.models.market_data_cache_ttl import MarketDataCacheTTL
+
+
+ class MassiveDataAnalyzer:
+     def __init__(self):
+         self.logger = logging.getLogger(__name__)
+         self.event_handlers = {
+             EventType.LimitUpLimitDown.value: self.handle_luld_event,
+             EventType.EquityAgg.value: self.handle_equity_agg_event,
+             EventType.EquityAggMin.value: self.handle_equity_agg_event,
+             EventType.EquityTrade.value: self.handle_equity_trade_event,
+             EventType.EquityQuote.value: self.handle_equity_quote_event,
+         }
+
+     def analyze_data(self, data: dict) -> Optional[List[MarketDataAnalyzerResult]]:
+         """
+         Process raw market data message
+
+         Args:
+             data: serialized message from Massive/Polygon.io
+
+         Returns:
+             Processed result dict or None if message should be discarded
+         """
+         if "event_type" not in data:
+             self.logger.info("Message missing 'event_type'")
+             return self.handle_unknown_event(data)
+         event_type = data.get("event_type")
+
+         if "symbol" not in data:
+             self.logger.info("Message missing 'symbol'")
+             return self.handle_unknown_event(data)
+         symbol = data.get("symbol")
+
+         if event_type in self.event_handlers:
+             return self.event_handlers[event_type](**{"data": data, "symbol": symbol})
+         else:
+             self.logger.warning(f"Unsupported message type: {event_type}")
+             return self.handle_unknown_event(data)
+
+     @staticmethod
+     def handle_luld_event(data: dict, symbol: str) -> Optional[List[MarketDataAnalyzerResult]]:
+         return [MarketDataAnalyzerResult(
+             data=data,
+             cache_key=f"{MarketDataCacheKeys.HALTS.value}:{symbol}",
+             cache_ttl=MarketDataCacheTTL.THREE_DAYS.value,
+             publish_key=f"{MarketDataCacheKeys.HALTS.value}:{symbol}",
+         )]
+
+     @staticmethod
+     def handle_equity_agg_event(data: dict, symbol: str) -> Optional[List[MarketDataAnalyzerResult]]:
+         return [MarketDataAnalyzerResult(
+             data=data,
+             cache_key=f"{MarketDataCacheKeys.AGGREGATE.value}:{symbol}",
+             cache_ttl=MarketDataCacheTTL.THREE_DAYS.value,
+             publish_key=f"{MarketDataCacheKeys.AGGREGATE.value}:{symbol}",
+         )]
+
+     @staticmethod
+     def handle_equity_trade_event(data: dict, symbol: str) -> Optional[List[MarketDataAnalyzerResult]]:
+         return [MarketDataAnalyzerResult(
+             data=data,
+             cache_key=f"{MarketDataCacheKeys.TRADES.value}:{symbol}",
+             cache_ttl=MarketDataCacheTTL.EIGHT_HOURS.value,
+             publish_key=f"{MarketDataCacheKeys.TRADES.value}:{symbol}",
+         )]
+
+     @staticmethod
+     def handle_equity_quote_event(data: dict, symbol: str) -> Optional[List[MarketDataAnalyzerResult]]:
+         return [MarketDataAnalyzerResult(
+             data=data,
+             cache_key=f"{MarketDataCacheKeys.QUOTES.value}:{symbol}",
+             cache_ttl=MarketDataCacheTTL.THREE_DAYS.value,
+             publish_key=f"{MarketDataCacheKeys.QUOTES.value}:{symbol}",
+         )]
+
+     @staticmethod
+     def handle_unknown_event(data: dict) -> Optional[List[MarketDataAnalyzerResult]]:
+         timestamp = f"{time()}".replace('.','')
+         cache_key = f"{MarketDataCacheKeys.UNKNOWN.value}:{timestamp}"
+         return [MarketDataAnalyzerResult(
+             data=data,
+             cache_key=cache_key,
+             cache_ttl=MarketDataCacheTTL.ONE_DAY.value,
+             publish_key=f"{MarketDataCacheKeys.UNKNOWN.value}",
+         )]
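
The new analyzer is synchronous and returns a list of MarketDataAnalyzerResult objects. A minimal usage sketch, not part of the package; the sample payload below is hypothetical:

    from kuhl_haus.mdp.analyzers.massive_data_analyzer import MassiveDataAnalyzer
    from massive.websocket.models import EventType

    # Hypothetical payload; real messages arrive over the Massive/Polygon.io websocket feed.
    message = {"event_type": EventType.EquityTrade.value, "symbol": "ACME", "price": 12.34}

    analyzer = MassiveDataAnalyzer()
    results = analyzer.analyze_data(message)  # plain call in 0.1.2 (no await)
    for result in results:
        print(result.cache_key, result.cache_ttl, result.publish_key)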
@@ -114,8 +114,8 @@ class MassiveDataProcessor:
          web_socket_message = json.loads(message.body.decode())
          data = WebSocketMessageSerde.to_dict(web_socket_message)

-         # Delegate to analyzer (async)
-         analyzer_results = await self.analyzer.analyze_data(data)
+         # Delegate to analyzer
+         analyzer_results = self.analyzer.analyze_data(data)
          if analyzer_results:
              self.processed += 1
              for analyzer_result in analyzer_results:
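
Because analyze_data is no longer a coroutine in 0.1.2, the processor calls it directly, as the hunk above shows. A minimal sketch of the caller-side difference, illustrative only:

    from kuhl_haus.mdp.analyzers.massive_data_analyzer import MassiveDataAnalyzer

    analyzer = MassiveDataAnalyzer()
    data = {"event_type": "UnsupportedEvent", "symbol": "ACME"}  # hypothetical payload

    analyzer_results = analyzer.analyze_data(data)  # 0.1.2: synchronous call
    # 0.1.1 required `await analyzer.analyze_data(data)`; awaiting the 0.1.2
    # method would raise a TypeError, since it returns a list rather than a coroutine.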
@@ -0,0 +1,19 @@
+ from enum import Enum
+
+
+ class MarketDataCacheTTL(Enum):
+     # Hours
+     ONE_HOUR = 3600
+     TWO_HOURS = 7200
+     FOUR_HOURS = 14400
+     SIX_HOURS = 21600
+     EIGHT_HOURS = 28800
+
+     # Days
+     ONE_DAY = 86400
+     TWO_DAYS = 172800
+     THREE_DAYS = 259200
+     FOUR_DAYS = 345600
+     FIVE_DAYS = 432000
+     SIX_DAYS = 518400
+     SEVEN_DAYS = 604800
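
Each member of this new enum is a duration in seconds, and the 0.1.2 analyzer passes these values as cache_ttl on its results. A quick sanity check of the constants, illustrative only:

    from kuhl_haus.mdp.models.market_data_cache_ttl import MarketDataCacheTTL

    # Each member is a plain integer number of seconds.
    assert MarketDataCacheTTL.EIGHT_HOURS.value == 8 * 60 * 60       # 28800
    assert MarketDataCacheTTL.ONE_DAY.value == 24 * 60 * 60          # 86400
    assert MarketDataCacheTTL.THREE_DAYS.value == 3 * 24 * 60 * 60   # 259200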
File without changes
@@ -0,0 +1,233 @@
+ from unittest.mock import MagicMock
+
+ import pytest
+ from massive.websocket.models import EventType
+ from src.kuhl_haus.mdp.analyzers.massive_data_analyzer import MassiveDataAnalyzer
+ from src.kuhl_haus.mdp.models.market_data_analyzer_result import MarketDataAnalyzerResult
+ from src.kuhl_haus.mdp.models.market_data_cache_keys import MarketDataCacheKeys
+ from kuhl_haus.mdp.models.market_data_cache_ttl import MarketDataCacheTTL
+
+
+ @pytest.fixture
+ def valid_symbol():
+     return "TEST"
+
+
+ @pytest.fixture
+ def valid_luld_data(valid_symbol: str):
+     return {"event_type": EventType.LimitUpLimitDown.value, "symbol": valid_symbol, "test": "data"}
+
+
+ @pytest.fixture
+ def valid_equity_agg_data(valid_symbol: str):
+     return {"event_type": EventType.EquityAgg.value, "symbol": valid_symbol, "test": "data"}
+
+
+ @pytest.fixture
+ def valid_equity_agg_minute_data(valid_symbol: str):
+     return {"event_type": EventType.EquityAggMin.value, "symbol": valid_symbol, "test": "data"}
+
+
+ @pytest.fixture
+ def valid_equity_trade_data(valid_symbol: str):
+     return {"event_type": EventType.EquityTrade.value, "symbol": valid_symbol, "test": "data"}
+
+
+ @pytest.fixture
+ def valid_equity_quote_data(valid_symbol: str):
+     return {"event_type": EventType.EquityQuote.value, "symbol": valid_symbol, "test": "data"}
+
+
+ def test_analyze_data_with_valid_luld_event_expect_valid_result(valid_symbol, valid_luld_data):
+     # Arrange
+     sut = MassiveDataAnalyzer()
+     symbol = valid_symbol
+     data = valid_luld_data
+
+     # Act
+     result = sut.analyze_data(data)
+
+     # Assert
+     assert len(result) == 1
+     assert result[0].cache_key == f"{MarketDataCacheKeys.HALTS.value}:{symbol}"
+     assert result[0].cache_ttl == MarketDataCacheTTL.THREE_DAYS.value
+     assert result[0].publish_key == f"{MarketDataCacheKeys.HALTS.value}:{symbol}"
+     assert result[0].data == data
+
+
+ def test_analyze_data_with_equity_agg_event_happy_path(valid_symbol, valid_equity_agg_data):
+     # Arrange
+     sut = MassiveDataAnalyzer()
+
+     # Act
+     result = sut.analyze_data(data=valid_equity_agg_data)
+
+     # Assert
+     assert len(result) == 1
+     assert result[0].cache_key == f"{MarketDataCacheKeys.AGGREGATE.value}:{valid_symbol}"
+     assert result[0].cache_ttl == MarketDataCacheTTL.THREE_DAYS.value
+     assert result[0].publish_key == f"{MarketDataCacheKeys.AGGREGATE.value}:{valid_symbol}"
+     assert result[0].data == valid_equity_agg_data
+
+
+ def test_analyze_data_with_equity_agg_min_event_happy_path(valid_symbol, valid_equity_agg_minute_data):
+     # Arrange
+     sut = MassiveDataAnalyzer()
+
+     # Act
+     result = sut.analyze_data(data=valid_equity_agg_minute_data)
+
+     # Assert
+     assert len(result) == 1
+     assert result[0].cache_key == f"{MarketDataCacheKeys.AGGREGATE.value}:{valid_symbol}"
+     assert result[0].cache_ttl == MarketDataCacheTTL.THREE_DAYS.value
+     assert result[0].publish_key == f"{MarketDataCacheKeys.AGGREGATE.value}:{valid_symbol}"
+     assert result[0].data == valid_equity_agg_minute_data
+
+
+ def test_analyze_data_with_equity_trade_event_happy_path(valid_symbol, valid_equity_trade_data):
+     # Arrange
+     sut = MassiveDataAnalyzer()
+
+     # Act
+     result = sut.analyze_data(data=valid_equity_trade_data)
+
+     # Assert
+     assert len(result) == 1
+     assert result[0].cache_key == f"{MarketDataCacheKeys.TRADES.value}:{valid_symbol}"
+     assert result[0].cache_ttl == MarketDataCacheTTL.EIGHT_HOURS.value
+     assert result[0].publish_key == f"{MarketDataCacheKeys.TRADES.value}:{valid_symbol}"
+     assert result[0].data == valid_equity_trade_data
+
+
+ def test_analyze_data_equity_quote_event_happy_path(valid_symbol, valid_equity_quote_data):
+     # Arrange
+     sut = MassiveDataAnalyzer()
+
+     # Act
+     result = sut.analyze_data(data=valid_equity_quote_data)
+
+     # Assert
+     assert len(result) == 1
+     assert result[0].cache_key == f"{MarketDataCacheKeys.QUOTES.value}:{valid_symbol}"
+     assert result[0].cache_ttl == MarketDataCacheTTL.THREE_DAYS.value
+     assert result[0].publish_key == f"{MarketDataCacheKeys.QUOTES.value}:{valid_symbol}"
+     assert result[0].data == valid_equity_quote_data
+
+
+ def test_analyze_data_with_missing_event_type_expect_unknown_event():
+     # Arrange
+     sut = MassiveDataAnalyzer()
+     sut.handle_unknown_event = MagicMock(return_value=None)
+     data = {"symbol": "TEST", "data": "test"}
+
+     # Act
+     result = sut.analyze_data(data)
+
+     # Assert
+     sut.handle_unknown_event.assert_called_once_with(data)
+     assert result is None
+
+
+ def test_analyze_data_with_missing_symbol_expect_unknown_event():
+     # Arrange
+     sut = MassiveDataAnalyzer()
+     sut.handle_unknown_event = MagicMock(return_value=None)
+     data = {"event_type": EventType.LimitUpLimitDown.value, "data": "test"}
+
+     # Act
+     result = sut.analyze_data(data)
+
+     # Assert
+     sut.handle_unknown_event.assert_called_once_with(data)
+     assert result is None
+
+
+ def test_analyze_data_with_unsupported_event_expect_unknown_event():
+     # Arrange
+     sut = MassiveDataAnalyzer()
+     sut.handle_unknown_event = MagicMock(return_value=None)
+     data = {"event_type": "UnsupportedEvent", "symbol": "TEST", "test": "data"}
+
+     # Act
+     result = sut.analyze_data(data)
+
+     # Assert
+     sut.handle_unknown_event.assert_called_once_with(data)
+     assert result is None
+
+
+ def test_handle_luld_event_happy_path(valid_symbol, valid_luld_data):
+     # Arrange
+     sut = MassiveDataAnalyzer()
+
+     # Act
+     result = sut.handle_luld_event(data=valid_luld_data, symbol=valid_symbol)
+
+     # Assert
+     assert len(result) == 1
+     assert result[0].cache_key == f"{MarketDataCacheKeys.HALTS.value}:{valid_symbol}"
+     assert result[0].cache_ttl == MarketDataCacheTTL.THREE_DAYS.value
+     assert result[0].publish_key == f"{MarketDataCacheKeys.HALTS.value}:{valid_symbol}"
+     assert result[0].data == valid_luld_data
+
+
+ def test_handle_equity_agg_event_happy_path(valid_symbol, valid_equity_agg_data):
+     # Arrange
+     sut = MassiveDataAnalyzer()
+
+     # Act
+     result = sut.handle_equity_agg_event(data=valid_equity_agg_data, symbol=valid_symbol)
+
+     # Assert
+     assert len(result) == 1
+     assert result[0].cache_key == f"{MarketDataCacheKeys.AGGREGATE.value}:{valid_symbol}"
+     assert result[0].cache_ttl == MarketDataCacheTTL.THREE_DAYS.value
+     assert result[0].publish_key == f"{MarketDataCacheKeys.AGGREGATE.value}:{valid_symbol}"
+     assert result[0].data == valid_equity_agg_data
+
+
+ def test_handle_equity_trade_event_happy_path(valid_symbol, valid_equity_trade_data):
+     # Arrange
+     sut = MassiveDataAnalyzer()
+
+     # Act
+     result = sut.handle_equity_trade_event(data=valid_equity_trade_data, symbol=valid_symbol)
+
+     # Assert
+     assert len(result) == 1
+     assert result[0].cache_key == f"{MarketDataCacheKeys.TRADES.value}:{valid_symbol}"
+     assert result[0].cache_ttl == MarketDataCacheTTL.EIGHT_HOURS.value
+     assert result[0].publish_key == f"{MarketDataCacheKeys.TRADES.value}:{valid_symbol}"
+     assert result[0].data == valid_equity_trade_data
+
+
+ def test_handle_equity_quote_event_happy_path(valid_symbol, valid_equity_quote_data):
+     # Arrange
+     sut = MassiveDataAnalyzer()
+
+     # Act
+     result = sut.handle_equity_quote_event(data=valid_equity_quote_data, symbol=valid_symbol)
+
+     # Assert
+     assert len(result) == 1
+     assert result[0].cache_key == f"{MarketDataCacheKeys.QUOTES.value}:{valid_symbol}"
+     assert result[0].cache_ttl == MarketDataCacheTTL.THREE_DAYS.value
+     assert result[0].publish_key == f"{MarketDataCacheKeys.QUOTES.value}:{valid_symbol}"
+     assert result[0].data == valid_equity_quote_data
+
+
+ def test_handle_unknown_event_happy_path():
+     # Arrange
+     sut = MassiveDataAnalyzer()
+     data = {"unknown": "event"}
+
+     # Act
+     result = sut.handle_unknown_event(data=data)
+
+     # Assert
+     assert len(result) == 1
+     assert result[0].cache_key.startswith(f"{MarketDataCacheKeys.UNKNOWN.value}:")
+     assert result[0].cache_ttl == MarketDataCacheTTL.ONE_DAY.value
+     assert result[0].publish_key == MarketDataCacheKeys.UNKNOWN.value
+     assert result[0].data == data
@@ -1,102 +0,0 @@
- import logging
- from time import time
- from typing import List, Optional
- from massive.websocket.models import EventType
-
- from kuhl_haus.mdp.models.market_data_analyzer_result import MarketDataAnalyzerResult
- from kuhl_haus.mdp.models.market_data_cache_keys import MarketDataCacheKeys
-
-
- class MassiveDataAnalyzer:
-     def __init__(self):
-         self.logger = logging.getLogger(__name__)
-         self.event_handlers = {
-             EventType.LimitUpLimitDown.value: self.handle_luld_event,
-             EventType.EquityAgg.value: self.handle_equity_agg_event,
-             EventType.EquityAggMin.value: self.handle_equity_agg_event,
-             EventType.EquityTrade.value: self.handle_equity_trade_event,
-             EventType.EquityQuote.value: self.handle_equity_quote_event,
-         }
-
-     async def analyze_data(self, data: dict) -> Optional[List[MarketDataAnalyzerResult]]:
-         """
-         Process raw market data message
-
-         Args:
-             data: serialized message from Massive/Polygon.io
-
-         Returns:
-             Processed result dict or None if message should be discarded
-         """
-         if "event_type" not in data:
-             self.logger.info("Message missing 'event_type'")
-             return await self.handle_unknown_event(data)
-         event_type = data.get("event_type")
-
-         if "symbol" not in data:
-             self.logger.info("Message missing 'symbol'")
-             return await self.handle_unknown_event(data)
-         symbol = data.get("symbol")
-
-         if event_type in self.event_handlers:
-             return await self.event_handlers[event_type](**{"data": data, "symbol": symbol})
-         else:
-             self.logger.warning(f"Unsupported message type: {event_type}")
-             return await self.handle_unknown_event(data)
-
-     async def handle_luld_event(self, data: dict, symbol: str) -> Optional[List[MarketDataAnalyzerResult]]:
-         try:
-             return [MarketDataAnalyzerResult(
-                 data=data,
-                 cache_key=f"{MarketDataCacheKeys.HALTS.value}:{symbol}",
-                 cache_ttl=28500, # 7 hours, 55 minutes
-                 publish_key=f"{MarketDataCacheKeys.HALTS.value}:{symbol}",
-             )]
-         except Exception as e:
-             self.logger.error(f"Error processing LULD message for {symbol}: {data}", e)
-
-     async def handle_equity_agg_event(self, data: dict, symbol: str) -> Optional[List[MarketDataAnalyzerResult]]:
-         try:
-             return [MarketDataAnalyzerResult(
-                 data=data,
-                 # cache_key=f"{MarketDataCacheKeys.AGGREGATE.value}:{symbol}",
-                 # cache_ttl=259200, # 3 days
-                 publish_key=f"{MarketDataCacheKeys.AGGREGATE.value}:{symbol}",
-             )]
-         except Exception as e:
-             self.logger.error(f"Error processing EquityAgg message for {symbol}: {data}", e)
-
-     async def handle_equity_trade_event(self, data: dict, symbol: str) -> Optional[List[MarketDataAnalyzerResult]]:
-         try:
-             return [MarketDataAnalyzerResult(
-                 data=data,
-                 # cache_key=f"{MarketDataCacheKeys.TRADES.value}:{symbol}",
-                 # cache_ttl=28500, # 7 hours, 55 minutes
-                 publish_key=f"{MarketDataCacheKeys.TRADES.value}:{symbol}",
-             )]
-         except Exception as e:
-             self.logger.error(f"Error processing EquityTrade message for {symbol}: {data}", e)
-
-     async def handle_equity_quote_event(self, data: dict, symbol: str) -> Optional[List[MarketDataAnalyzerResult]]:
-         try:
-             return [MarketDataAnalyzerResult(
-                 data=data,
-                 # cache_key=f"{MarketDataCacheKeys.QUOTES.value}:{symbol}",
-                 # cache_ttl=259200, # 3 days
-                 publish_key=f"{MarketDataCacheKeys.QUOTES.value}:{symbol}",
-             )]
-         except Exception as e:
-             self.logger.error(f"Error processing EquityQuote message for {symbol}: {data}", e)
-
-     async def handle_unknown_event(self, data: dict) -> Optional[List[MarketDataAnalyzerResult]]:
-         try:
-             timestamp = f"{time()}".replace('.','')
-             cache_key = f"{MarketDataCacheKeys.UNKNOWN.value}:{timestamp}"
-             return [MarketDataAnalyzerResult(
-                 data=data,
-                 cache_key=cache_key,
-                 cache_ttl=86400, # 1 days
-                 publish_key=f"{MarketDataCacheKeys.UNKNOWN.value}",
-             )]
-         except Exception as e:
-             self.logger.error(f"Error processing unknown message type: {data}", e)
File without changes
File without changes