kuhl-haus-mdp 0.1.1__tar.gz → 0.1.3__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44)
  1. {kuhl_haus_mdp-0.1.1 → kuhl_haus_mdp-0.1.3}/PKG-INFO +1 -1
  2. {kuhl_haus_mdp-0.1.1 → kuhl_haus_mdp-0.1.3}/pyproject.toml +1 -1
  3. kuhl_haus_mdp-0.1.3/src/kuhl_haus/mdp/analyzers/massive_data_analyzer.py +93 -0
  4. {kuhl_haus_mdp-0.1.1 → kuhl_haus_mdp-0.1.3}/src/kuhl_haus/mdp/analyzers/top_stocks.py +3 -151
  5. {kuhl_haus_mdp-0.1.1 → kuhl_haus_mdp-0.1.3}/src/kuhl_haus/mdp/integ/massive_data_processor.py +2 -2
  6. kuhl_haus_mdp-0.1.3/src/kuhl_haus/mdp/models/market_data_cache_ttl.py +19 -0
  7. kuhl_haus_mdp-0.1.3/src/kuhl_haus/mdp/models/top_stocks_cache_item.py +143 -0
  8. kuhl_haus_mdp-0.1.3/tests/analyzers/test_massive_data_analyzer.py +233 -0
  9. kuhl_haus_mdp-0.1.3/tests/helpers/__init__.py +0 -0
  10. kuhl_haus_mdp-0.1.3/tests/integ/__init__.py +0 -0
  11. kuhl_haus_mdp-0.1.3/tests/models/__init__.py +0 -0
  12. kuhl_haus_mdp-0.1.3/tests/models/test_top_stocks_cache_item.py +109 -0
  13. kuhl_haus_mdp-0.1.1/src/kuhl_haus/mdp/analyzers/massive_data_analyzer.py +0 -102
  14. {kuhl_haus_mdp-0.1.1 → kuhl_haus_mdp-0.1.3}/LICENSE.txt +0 -0
  15. {kuhl_haus_mdp-0.1.1 → kuhl_haus_mdp-0.1.3}/README.md +0 -0
  16. {kuhl_haus_mdp-0.1.1 → kuhl_haus_mdp-0.1.3}/src/kuhl_haus/mdp/__init__.py +0 -0
  17. {kuhl_haus_mdp-0.1.1 → kuhl_haus_mdp-0.1.3}/src/kuhl_haus/mdp/analyzers/__init__.py +0 -0
  18. {kuhl_haus_mdp-0.1.1 → kuhl_haus_mdp-0.1.3}/src/kuhl_haus/mdp/analyzers/analyzer.py +0 -0
  19. {kuhl_haus_mdp-0.1.1 → kuhl_haus_mdp-0.1.3}/src/kuhl_haus/mdp/components/__init__.py +0 -0
  20. {kuhl_haus_mdp-0.1.1 → kuhl_haus_mdp-0.1.3}/src/kuhl_haus/mdp/components/market_data_cache.py +0 -0
  21. {kuhl_haus_mdp-0.1.1 → kuhl_haus_mdp-0.1.3}/src/kuhl_haus/mdp/components/market_data_scanner.py +0 -0
  22. {kuhl_haus_mdp-0.1.1 → kuhl_haus_mdp-0.1.3}/src/kuhl_haus/mdp/components/widget_data_service.py +0 -0
  23. {kuhl_haus_mdp-0.1.1 → kuhl_haus_mdp-0.1.3}/src/kuhl_haus/mdp/helpers/__init__.py +0 -0
  24. {kuhl_haus_mdp-0.1.1 → kuhl_haus_mdp-0.1.3}/src/kuhl_haus/mdp/helpers/process_manager.py +0 -0
  25. {kuhl_haus_mdp-0.1.1 → kuhl_haus_mdp-0.1.3}/src/kuhl_haus/mdp/helpers/queue_name_resolver.py +0 -0
  26. {kuhl_haus_mdp-0.1.1 → kuhl_haus_mdp-0.1.3}/src/kuhl_haus/mdp/integ/__init__.py +0 -0
  27. {kuhl_haus_mdp-0.1.1 → kuhl_haus_mdp-0.1.3}/src/kuhl_haus/mdp/integ/massive_data_listener.py +0 -0
  28. {kuhl_haus_mdp-0.1.1 → kuhl_haus_mdp-0.1.3}/src/kuhl_haus/mdp/integ/massive_data_queues.py +0 -0
  29. {kuhl_haus_mdp-0.1.1 → kuhl_haus_mdp-0.1.3}/src/kuhl_haus/mdp/integ/utils.py +0 -0
  30. {kuhl_haus_mdp-0.1.1 → kuhl_haus_mdp-0.1.3}/src/kuhl_haus/mdp/integ/web_socket_message_serde.py +0 -0
  31. {kuhl_haus_mdp-0.1.1 → kuhl_haus_mdp-0.1.3}/src/kuhl_haus/mdp/models/__init__.py +0 -0
  32. {kuhl_haus_mdp-0.1.1 → kuhl_haus_mdp-0.1.3}/src/kuhl_haus/mdp/models/market_data_analyzer_result.py +0 -0
  33. {kuhl_haus_mdp-0.1.1 → kuhl_haus_mdp-0.1.3}/src/kuhl_haus/mdp/models/market_data_cache_keys.py +0 -0
  34. {kuhl_haus_mdp-0.1.1 → kuhl_haus_mdp-0.1.3}/src/kuhl_haus/mdp/models/market_data_pubsub_keys.py +0 -0
  35. {kuhl_haus_mdp-0.1.1 → kuhl_haus_mdp-0.1.3}/src/kuhl_haus/mdp/models/market_data_scanner_names.py +0 -0
  36. {kuhl_haus_mdp-0.1.1 → kuhl_haus_mdp-0.1.3}/src/kuhl_haus/mdp/models/massive_data_queue.py +0 -0
  37. {kuhl_haus_mdp-0.1.1 → kuhl_haus_mdp-0.1.3}/tests/__init__.py +0 -0
  38. {kuhl_haus_mdp-0.1.1/tests/components → kuhl_haus_mdp-0.1.3/tests/analyzers}/__init__.py +0 -0
  39. {kuhl_haus_mdp-0.1.1/tests/helpers → kuhl_haus_mdp-0.1.3/tests/components}/__init__.py +0 -0
  40. {kuhl_haus_mdp-0.1.1 → kuhl_haus_mdp-0.1.3}/tests/components/test_market_data_scanner.py +0 -0
  41. {kuhl_haus_mdp-0.1.1 → kuhl_haus_mdp-0.1.3}/tests/components/test_widget_data_service.py +0 -0
  42. {kuhl_haus_mdp-0.1.1 → kuhl_haus_mdp-0.1.3}/tests/helpers/test_process_manager.py +0 -0
  43. {kuhl_haus_mdp-0.1.1/tests → kuhl_haus_mdp-0.1.3/tests/helpers}/test_queue_name_resolver.py +0 -0
  44. {kuhl_haus_mdp-0.1.1/tests → kuhl_haus_mdp-0.1.3/tests/integ}/test_web_socket_message_serde.py +0 -0
--- kuhl_haus_mdp-0.1.1/PKG-INFO
+++ kuhl_haus_mdp-0.1.3/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: kuhl-haus-mdp
-Version: 0.1.1
+Version: 0.1.3
 Summary: Market data processing pipeline for stock market scanner
 Author-Email: Tom Pounders <git@oldschool.engineer>
 License: The MIT License (MIT)
--- kuhl_haus_mdp-0.1.1/pyproject.toml
+++ kuhl_haus_mdp-0.1.3/pyproject.toml
@@ -29,7 +29,7 @@ dependencies = [
     "python-dotenv",
     "massive",
 ]
-version = "0.1.1"
+version = "0.1.3"
 
 [project.license]
 file = "LICENSE.txt"
--- /dev/null
+++ kuhl_haus_mdp-0.1.3/src/kuhl_haus/mdp/analyzers/massive_data_analyzer.py
@@ -0,0 +1,93 @@
+import logging
+from time import time
+from typing import List, Optional
+from massive.websocket.models import EventType
+
+from kuhl_haus.mdp.models.market_data_analyzer_result import MarketDataAnalyzerResult
+from kuhl_haus.mdp.models.market_data_cache_keys import MarketDataCacheKeys
+from kuhl_haus.mdp.models.market_data_cache_ttl import MarketDataCacheTTL
+
+
+class MassiveDataAnalyzer:
+    def __init__(self):
+        self.logger = logging.getLogger(__name__)
+        self.event_handlers = {
+            EventType.LimitUpLimitDown.value: self.handle_luld_event,
+            EventType.EquityAgg.value: self.handle_equity_agg_event,
+            EventType.EquityAggMin.value: self.handle_equity_agg_event,
+            EventType.EquityTrade.value: self.handle_equity_trade_event,
+            EventType.EquityQuote.value: self.handle_equity_quote_event,
+        }
+
+    def analyze_data(self, data: dict) -> Optional[List[MarketDataAnalyzerResult]]:
+        """
+        Process raw market data message
+
+        Args:
+            data: serialized message from Massive/Polygon.io
+
+        Returns:
+            Processed result dict or None if message should be discarded
+        """
+        if "event_type" not in data:
+            self.logger.info("Message missing 'event_type'")
+            return self.handle_unknown_event(data)
+        event_type = data.get("event_type")
+
+        if "symbol" not in data:
+            self.logger.info("Message missing 'symbol'")
+            return self.handle_unknown_event(data)
+        symbol = data.get("symbol")
+
+        if event_type in self.event_handlers:
+            return self.event_handlers[event_type](**{"data": data, "symbol": symbol})
+        else:
+            self.logger.warning(f"Unsupported message type: {event_type}")
+            return self.handle_unknown_event(data)
+
+    @staticmethod
+    def handle_luld_event(data: dict, symbol: str) -> Optional[List[MarketDataAnalyzerResult]]:
+        return [MarketDataAnalyzerResult(
+            data=data,
+            cache_key=f"{MarketDataCacheKeys.HALTS.value}:{symbol}",
+            cache_ttl=MarketDataCacheTTL.THREE_DAYS.value,
+            publish_key=f"{MarketDataCacheKeys.HALTS.value}:{symbol}",
+        )]
+
+    @staticmethod
+    def handle_equity_agg_event(data: dict, symbol: str) -> Optional[List[MarketDataAnalyzerResult]]:
+        return [MarketDataAnalyzerResult(
+            data=data,
+            cache_key=f"{MarketDataCacheKeys.AGGREGATE.value}:{symbol}",
+            cache_ttl=MarketDataCacheTTL.THREE_DAYS.value,
+            publish_key=f"{MarketDataCacheKeys.AGGREGATE.value}:{symbol}",
+        )]
+
+    @staticmethod
+    def handle_equity_trade_event(data: dict, symbol: str) -> Optional[List[MarketDataAnalyzerResult]]:
+        return [MarketDataAnalyzerResult(
+            data=data,
+            cache_key=f"{MarketDataCacheKeys.TRADES.value}:{symbol}",
+            cache_ttl=MarketDataCacheTTL.EIGHT_HOURS.value,
+            publish_key=f"{MarketDataCacheKeys.TRADES.value}:{symbol}",
+        )]
+
+    @staticmethod
+    def handle_equity_quote_event(data: dict, symbol: str) -> Optional[List[MarketDataAnalyzerResult]]:
+        return [MarketDataAnalyzerResult(
+            data=data,
+            cache_key=f"{MarketDataCacheKeys.QUOTES.value}:{symbol}",
+            cache_ttl=MarketDataCacheTTL.THREE_DAYS.value,
+            publish_key=f"{MarketDataCacheKeys.QUOTES.value}:{symbol}",
+        )]
+
+    @staticmethod
+    def handle_unknown_event(data: dict) -> Optional[List[MarketDataAnalyzerResult]]:
+        timestamp = f"{time()}".replace('.','')
+        cache_key = f"{MarketDataCacheKeys.UNKNOWN.value}:{timestamp}"
+        return [MarketDataAnalyzerResult(
+            data=data,
+            cache_key=cache_key,
+            cache_ttl=MarketDataCacheTTL.ONE_DAY.value,
+            publish_key=f"{MarketDataCacheKeys.UNKNOWN.value}",
+        )]
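
The rewritten analyzer replaces 0.1.1's per-handler try/except blocks and magic-number TTLs with static handlers and the MarketDataCacheTTL enum, dispatched through a plain dict keyed on event_type. A minimal usage sketch (the payload below is hypothetical; only "event_type" and "symbol" are required for dispatch, as the test fixtures later in this diff also show):

    from massive.websocket.models import EventType
    from kuhl_haus.mdp.analyzers.massive_data_analyzer import MassiveDataAnalyzer

    analyzer = MassiveDataAnalyzer()
    # Hypothetical minute-aggregate message; real messages arrive over the Massive websocket.
    message = {"event_type": EventType.EquityAggMin.value, "symbol": "AAPL", "close": 187.2}
    results = analyzer.analyze_data(message)  # a list of MarketDataAnalyzerResult
    for result in results:
        print(result.cache_key, result.cache_ttl, result.publish_key)
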
--- kuhl_haus_mdp-0.1.1/src/kuhl_haus/mdp/analyzers/top_stocks.py
+++ kuhl_haus_mdp-0.1.3/src/kuhl_haus/mdp/analyzers/top_stocks.py
@@ -1,167 +1,25 @@
 import logging
 import time
-from collections import defaultdict
-from dataclasses import dataclass, field
 from datetime import datetime, timezone, timedelta
-from typing import Dict, Optional, List, Iterator
+from typing import Optional, List, Iterator
 from zoneinfo import ZoneInfo
 
+from massive.exceptions import BadResponse
 from massive.rest import RESTClient
 from massive.rest.models import (
     TickerSnapshot,
     Agg,
 )
 from massive.websocket.models import (
-    EquityTrade,
     EquityAgg,
     EventType
 )
-from massive.exceptions import BadResponse
 
 from kuhl_haus.mdp.analyzers.analyzer import Analyzer
 from kuhl_haus.mdp.models.market_data_analyzer_result import MarketDataAnalyzerResult
 from kuhl_haus.mdp.models.market_data_cache_keys import MarketDataCacheKeys
 from kuhl_haus.mdp.models.market_data_pubsub_keys import MarketDataPubSubKeys
-
-
-# docs
-# https://massive.com/docs/stocks/ws_stocks_am
-# https://massive.com/docs/websocket/stocks/trades
-
-@dataclass()
-class TopStocksCacheItem:
-    day_start_time: Optional[float] = 0.0
-
-    # Cached details for each ticker
-    symbol_data_cache: Optional[Dict[str, dict]] = field(default_factory=lambda: defaultdict(dict))
-
-    # Top Volume map
-    top_volume_map: Optional[Dict[str, float]] = field(default_factory=lambda: defaultdict(dict))
-
-    # Top Gappers map
-    top_gappers_map: Optional[Dict[str, float]] = field(default_factory=lambda: defaultdict(dict))
-
-    # Top Gainers map
-    top_gainers_map: Optional[Dict[str, float]] = field(default_factory=lambda: defaultdict(dict))
-
-    def to_dict(self):
-        ret = {
-            # Cache start time
-            "day_start_time": self.day_start_time,
-
-            # Maps
-            "symbol_data_cache": self.symbol_data_cache,
-            "top_volume_map": self.top_volume_map,
-            "top_gappers_map": self.top_gappers_map,
-            "top_gainers_map": self.top_gainers_map,
-        }
-        return ret
-
-    def top_volume(self, limit):
-        ret = []
-        for ticker, volume in sorted(self.top_volume_map.items(), key=lambda x: x[1], reverse=True)[
-            :limit
-        ]:
-            try:
-                ret.append({
-                    "symbol": ticker,
-                    "volume": self.symbol_data_cache[ticker]["volume"],
-                    "accumulated_volume": self.symbol_data_cache[ticker]["accumulated_volume"],
-                    "relative_volume": self.symbol_data_cache[ticker]["relative_volume"],
-                    "official_open_price": self.symbol_data_cache[ticker]["official_open_price"],
-                    "vwap": self.symbol_data_cache[ticker]["vwap"],
-                    "open": self.symbol_data_cache[ticker]["open"],
-                    "close": self.symbol_data_cache[ticker]["close"],
-                    "high": self.symbol_data_cache[ticker]["high"],
-                    "low": self.symbol_data_cache[ticker]["low"],
-                    "aggregate_vwap": self.symbol_data_cache[ticker]["aggregate_vwap"],
-                    "average_size": self.symbol_data_cache[ticker]["average_size"],
-                    "avg_volume": self.symbol_data_cache[ticker]["avg_volume"],
-                    "prev_day_close": self.symbol_data_cache[ticker]["prev_day_close"],
-                    "prev_day_volume": self.symbol_data_cache[ticker]["prev_day_volume"],
-                    "prev_day_vwap": self.symbol_data_cache[ticker]["prev_day_vwap"],
-                    "change": self.symbol_data_cache[ticker]["change"],
-                    "pct_change": self.symbol_data_cache[ticker]["pct_change"],
-                    "change_since_open": self.symbol_data_cache[ticker]["change_since_open"],
-                    "pct_change_since_open": self.symbol_data_cache[ticker]["pct_change_since_open"],
-                    "start_timestamp": self.symbol_data_cache[ticker]["start_timestamp"],
-                    "end_timestamp": self.symbol_data_cache[ticker]["end_timestamp"],
-                })
-            except KeyError:
-                del self.top_volume_map[ticker]
-        return ret
-
-    def top_gappers(self, limit):
-        ret = []
-        for ticker, pct_change in sorted(self.top_gappers_map.items(), key=lambda x: x[1], reverse=True)[
-            :limit
-        ]:
-            try:
-                if pct_change <= 0:
-                    break
-                ret.append({
-                    "symbol": ticker,
-                    "volume": self.symbol_data_cache[ticker]["volume"],
-                    "accumulated_volume": self.symbol_data_cache[ticker]["accumulated_volume"],
-                    "relative_volume": self.symbol_data_cache[ticker]["relative_volume"],
-                    "official_open_price": self.symbol_data_cache[ticker]["official_open_price"],
-                    "vwap": self.symbol_data_cache[ticker]["vwap"],
-                    "open": self.symbol_data_cache[ticker]["open"],
-                    "close": self.symbol_data_cache[ticker]["close"],
-                    "high": self.symbol_data_cache[ticker]["high"],
-                    "low": self.symbol_data_cache[ticker]["low"],
-                    "aggregate_vwap": self.symbol_data_cache[ticker]["aggregate_vwap"],
-                    "average_size": self.symbol_data_cache[ticker]["average_size"],
-                    "avg_volume": self.symbol_data_cache[ticker]["avg_volume"],
-                    "prev_day_close": self.symbol_data_cache[ticker]["prev_day_close"],
-                    "prev_day_volume": self.symbol_data_cache[ticker]["prev_day_volume"],
-                    "prev_day_vwap": self.symbol_data_cache[ticker]["prev_day_vwap"],
-                    "change": self.symbol_data_cache[ticker]["change"],
-                    "pct_change": self.symbol_data_cache[ticker]["pct_change"],
-                    "change_since_open": self.symbol_data_cache[ticker]["change_since_open"],
-                    "pct_change_since_open": self.symbol_data_cache[ticker]["pct_change_since_open"],
-                    "start_timestamp": self.symbol_data_cache[ticker]["start_timestamp"],
-                    "end_timestamp": self.symbol_data_cache[ticker]["end_timestamp"],
-                })
-            except KeyError:
-                del self.top_gappers_map[ticker]
-        return ret
-
-    def top_gainers(self, limit):
-        ret = []
-        for ticker, pct_change in sorted(self.top_gainers_map.items(), key=lambda x: x[1], reverse=True)[
-            :limit
-        ]:
-            try:
-                if pct_change <= 0:
-                    break
-                ret.append({
-                    "symbol": ticker,
-                    "volume": self.symbol_data_cache[ticker]["volume"],
-                    "accumulated_volume": self.symbol_data_cache[ticker]["accumulated_volume"],
-                    "relative_volume": self.symbol_data_cache[ticker]["relative_volume"],
-                    "official_open_price": self.symbol_data_cache[ticker]["official_open_price"],
-                    "vwap": self.symbol_data_cache[ticker]["vwap"],
-                    "open": self.symbol_data_cache[ticker]["open"],
-                    "close": self.symbol_data_cache[ticker]["close"],
-                    "high": self.symbol_data_cache[ticker]["high"],
-                    "low": self.symbol_data_cache[ticker]["low"],
-                    "aggregate_vwap": self.symbol_data_cache[ticker]["aggregate_vwap"],
-                    "average_size": self.symbol_data_cache[ticker]["average_size"],
-                    "avg_volume": self.symbol_data_cache[ticker]["avg_volume"],
-                    "prev_day_close": self.symbol_data_cache[ticker]["prev_day_close"],
-                    "prev_day_volume": self.symbol_data_cache[ticker]["prev_day_volume"],
-                    "prev_day_vwap": self.symbol_data_cache[ticker]["prev_day_vwap"],
-                    "change": self.symbol_data_cache[ticker]["change"],
-                    "pct_change": self.symbol_data_cache[ticker]["pct_change"],
-                    "change_since_open": self.symbol_data_cache[ticker]["change_since_open"],
-                    "pct_change_since_open": self.symbol_data_cache[ticker]["pct_change_since_open"],
-                    "start_timestamp": self.symbol_data_cache[ticker]["start_timestamp"],
-                    "end_timestamp": self.symbol_data_cache[ticker]["end_timestamp"],
-                })
-            except KeyError:
-                del self.top_gainers_map[ticker]
-        return ret
+from kuhl_haus.mdp.models.top_stocks_cache_item import TopStocksCacheItem
 
 
 class TopStocksAnalyzer(Analyzer):
@@ -233,12 +91,6 @@ class TopStocksAnalyzer(Analyzer):
         self.last_update_time = current_time
 
         result = [
-            # MarketDataAnalyzerResult(
-            #     data=data,
-            #     cache_key=f"{MarketDataCacheKeys.AGGREGATE.value}:{symbol}",
-            #     cache_ttl=86400, # 1 day
-            #     # publish_key=f"{MarketDataCacheKeys.AGGREGATE.value}:{symbol}",
-            # ),
             MarketDataAnalyzerResult(
                 data=self.cache_item.to_dict(),
                 cache_key=self.cache_key,
--- kuhl_haus_mdp-0.1.1/src/kuhl_haus/mdp/integ/massive_data_processor.py
+++ kuhl_haus_mdp-0.1.3/src/kuhl_haus/mdp/integ/massive_data_processor.py
@@ -114,8 +114,8 @@ class MassiveDataProcessor:
         web_socket_message = json.loads(message.body.decode())
         data = WebSocketMessageSerde.to_dict(web_socket_message)
 
-        # Delegate to analyzer (async)
-        analyzer_results = await self.analyzer.analyze_data(data)
+        # Delegate to analyzer
+        analyzer_results = self.analyzer.analyze_data(data)
         if analyzer_results:
             self.processed += 1
             for analyzer_result in analyzer_results:
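
This change pairs with the analyzer rewrite above: analyze_data is now a plain function rather than a coroutine, so the processor calls it inline (awaiting the new synchronous method would raise a TypeError). A sketch of the changed call site in isolation, with the surrounding handler assumed from the hunk above:

    # Old (0.1.1): analyze_data was a coroutine and had to be awaited.
    # analyzer_results = await self.analyzer.analyze_data(data)

    # New (0.1.3): a direct call inside the async consumer; the handlers only
    # build result objects and do no I/O, so there is nothing to await.
    analyzer_results = self.analyzer.analyze_data(data)
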
--- /dev/null
+++ kuhl_haus_mdp-0.1.3/src/kuhl_haus/mdp/models/market_data_cache_ttl.py
@@ -0,0 +1,19 @@
+from enum import Enum
+
+
+class MarketDataCacheTTL(Enum):
+    # Hours
+    ONE_HOUR = 3600
+    TWO_HOURS = 7200
+    FOUR_HOURS = 14400
+    SIX_HOURS = 21600
+    EIGHT_HOURS = 28800
+
+    # Days
+    ONE_DAY = 86400
+    TWO_DAYS = 172800
+    THREE_DAYS = 259200
+    FOUR_DAYS = 345600
+    FIVE_DAYS = 432000
+    SIX_DAYS = 518400
+    SEVEN_DAYS = 604800
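
The enum values are plain integers denominated in seconds, so call sites read MarketDataCacheTTL.THREE_DAYS.value where 0.1.1 hard-coded 259200. A quick sanity-check sketch:

    from kuhl_haus.mdp.models.market_data_cache_ttl import MarketDataCacheTTL

    assert MarketDataCacheTTL.EIGHT_HOURS.value == 8 * 3600      # 28800
    assert MarketDataCacheTTL.THREE_DAYS.value == 3 * 24 * 3600  # 259200
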
--- /dev/null
+++ kuhl_haus_mdp-0.1.3/src/kuhl_haus/mdp/models/top_stocks_cache_item.py
@@ -0,0 +1,143 @@
+from collections import defaultdict
+from dataclasses import dataclass, field
+from typing import Dict, Optional
+
+
+# docs
+# https://massive.com/docs/stocks/ws_stocks_am
+# https://massive.com/docs/websocket/stocks/trades
+
+@dataclass()
+class TopStocksCacheItem:
+    day_start_time: Optional[float] = 0.0
+
+    # Cached details for each ticker
+    symbol_data_cache: Optional[Dict[str, dict]] = field(default_factory=lambda: defaultdict(dict))
+
+    # Top Volume map
+    top_volume_map: Optional[Dict[str, float]] = field(default_factory=lambda: defaultdict(dict))
+
+    # Top Gappers map
+    top_gappers_map: Optional[Dict[str, float]] = field(default_factory=lambda: defaultdict(dict))
+
+    # Top Gainers map
+    top_gainers_map: Optional[Dict[str, float]] = field(default_factory=lambda: defaultdict(dict))
+
+    def to_dict(self):
+        ret = {
+            # Cache start time
+            "day_start_time": self.day_start_time,
+
+            # Maps
+            "symbol_data_cache": self.symbol_data_cache,
+            "top_volume_map": self.top_volume_map,
+            "top_gappers_map": self.top_gappers_map,
+            "top_gainers_map": self.top_gainers_map,
+        }
+        return ret
+
+    def top_volume(self, limit):
+        ret = []
+        for ticker, volume in sorted(self.top_volume_map.items(), key=lambda x: x[1], reverse=True)[
+            :limit
+        ]:
+            try:
+                ret.append({
+                    "symbol": ticker,
+                    "volume": self.symbol_data_cache[ticker]["volume"],
+                    "accumulated_volume": self.symbol_data_cache[ticker]["accumulated_volume"],
+                    "relative_volume": self.symbol_data_cache[ticker]["relative_volume"],
+                    "official_open_price": self.symbol_data_cache[ticker]["official_open_price"],
+                    "vwap": self.symbol_data_cache[ticker]["vwap"],
+                    "open": self.symbol_data_cache[ticker]["open"],
+                    "close": self.symbol_data_cache[ticker]["close"],
+                    "high": self.symbol_data_cache[ticker]["high"],
+                    "low": self.symbol_data_cache[ticker]["low"],
+                    "aggregate_vwap": self.symbol_data_cache[ticker]["aggregate_vwap"],
+                    "average_size": self.symbol_data_cache[ticker]["average_size"],
+                    "avg_volume": self.symbol_data_cache[ticker]["avg_volume"],
+                    "prev_day_close": self.symbol_data_cache[ticker]["prev_day_close"],
+                    "prev_day_volume": self.symbol_data_cache[ticker]["prev_day_volume"],
+                    "prev_day_vwap": self.symbol_data_cache[ticker]["prev_day_vwap"],
+                    "change": self.symbol_data_cache[ticker]["change"],
+                    "pct_change": self.symbol_data_cache[ticker]["pct_change"],
+                    "change_since_open": self.symbol_data_cache[ticker]["change_since_open"],
+                    "pct_change_since_open": self.symbol_data_cache[ticker]["pct_change_since_open"],
+                    "start_timestamp": self.symbol_data_cache[ticker]["start_timestamp"],
+                    "end_timestamp": self.symbol_data_cache[ticker]["end_timestamp"],
+                })
+            except KeyError:
+                del self.top_volume_map[ticker]
+        return ret
+
+    def top_gappers(self, limit):
+        ret = []
+        for ticker, pct_change in sorted(self.top_gappers_map.items(), key=lambda x: x[1], reverse=True)[
+            :limit
+        ]:
+            try:
+                if pct_change <= 0:
+                    break
+                ret.append({
+                    "symbol": ticker,
+                    "volume": self.symbol_data_cache[ticker]["volume"],
+                    "accumulated_volume": self.symbol_data_cache[ticker]["accumulated_volume"],
+                    "relative_volume": self.symbol_data_cache[ticker]["relative_volume"],
+                    "official_open_price": self.symbol_data_cache[ticker]["official_open_price"],
+                    "vwap": self.symbol_data_cache[ticker]["vwap"],
+                    "open": self.symbol_data_cache[ticker]["open"],
+                    "close": self.symbol_data_cache[ticker]["close"],
+                    "high": self.symbol_data_cache[ticker]["high"],
+                    "low": self.symbol_data_cache[ticker]["low"],
+                    "aggregate_vwap": self.symbol_data_cache[ticker]["aggregate_vwap"],
+                    "average_size": self.symbol_data_cache[ticker]["average_size"],
+                    "avg_volume": self.symbol_data_cache[ticker]["avg_volume"],
+                    "prev_day_close": self.symbol_data_cache[ticker]["prev_day_close"],
+                    "prev_day_volume": self.symbol_data_cache[ticker]["prev_day_volume"],
+                    "prev_day_vwap": self.symbol_data_cache[ticker]["prev_day_vwap"],
+                    "change": self.symbol_data_cache[ticker]["change"],
+                    "pct_change": self.symbol_data_cache[ticker]["pct_change"],
+                    "change_since_open": self.symbol_data_cache[ticker]["change_since_open"],
+                    "pct_change_since_open": self.symbol_data_cache[ticker]["pct_change_since_open"],
+                    "start_timestamp": self.symbol_data_cache[ticker]["start_timestamp"],
+                    "end_timestamp": self.symbol_data_cache[ticker]["end_timestamp"],
+                })
+            except KeyError:
+                del self.top_gappers_map[ticker]
+        return ret
+
+    def top_gainers(self, limit):
+        ret = []
+        for ticker, pct_change in sorted(self.top_gainers_map.items(), key=lambda x: x[1], reverse=True)[
+            :limit
+        ]:
+            try:
+                if pct_change <= 0:
+                    break
+                ret.append({
+                    "symbol": ticker,
+                    "volume": self.symbol_data_cache[ticker]["volume"],
+                    "accumulated_volume": self.symbol_data_cache[ticker]["accumulated_volume"],
+                    "relative_volume": self.symbol_data_cache[ticker]["relative_volume"],
+                    "official_open_price": self.symbol_data_cache[ticker]["official_open_price"],
+                    "vwap": self.symbol_data_cache[ticker]["vwap"],
+                    "open": self.symbol_data_cache[ticker]["open"],
+                    "close": self.symbol_data_cache[ticker]["close"],
+                    "high": self.symbol_data_cache[ticker]["high"],
+                    "low": self.symbol_data_cache[ticker]["low"],
+                    "aggregate_vwap": self.symbol_data_cache[ticker]["aggregate_vwap"],
+                    "average_size": self.symbol_data_cache[ticker]["average_size"],
+                    "avg_volume": self.symbol_data_cache[ticker]["avg_volume"],
+                    "prev_day_close": self.symbol_data_cache[ticker]["prev_day_close"],
+                    "prev_day_volume": self.symbol_data_cache[ticker]["prev_day_volume"],
+                    "prev_day_vwap": self.symbol_data_cache[ticker]["prev_day_vwap"],
+                    "change": self.symbol_data_cache[ticker]["change"],
+                    "pct_change": self.symbol_data_cache[ticker]["pct_change"],
+                    "change_since_open": self.symbol_data_cache[ticker]["change_since_open"],
+                    "pct_change_since_open": self.symbol_data_cache[ticker]["pct_change_since_open"],
+                    "start_timestamp": self.symbol_data_cache[ticker]["start_timestamp"],
+                    "end_timestamp": self.symbol_data_cache[ticker]["end_timestamp"],
+                })
+            except KeyError:
+                del self.top_gainers_map[ticker]
+        return ret
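
The class is extracted verbatim from top_stocks.py (the removal above). Note the eviction behavior all three ranking methods share: a ticker present in a ranking map but not fully hydrated in symbol_data_cache raises KeyError and is deleted from that map. A small sketch of that behavior (hypothetical ticker and volume):

    from kuhl_haus.mdp.models.top_stocks_cache_item import TopStocksCacheItem

    item = TopStocksCacheItem()
    item.top_volume_map["XYZ"] = 1_000_000.0  # ranked, but no cached details yet

    # symbol_data_cache["XYZ"] defaults to {}, so the first field lookup raises
    # KeyError and the ticker is evicted from the ranking map.
    assert item.top_volume(10) == []
    assert "XYZ" not in item.top_volume_map
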
--- /dev/null
+++ kuhl_haus_mdp-0.1.3/tests/analyzers/test_massive_data_analyzer.py
@@ -0,0 +1,233 @@
+from unittest.mock import MagicMock
+
+import pytest
+from massive.websocket.models import EventType
+from src.kuhl_haus.mdp.analyzers.massive_data_analyzer import MassiveDataAnalyzer
+from src.kuhl_haus.mdp.models.market_data_analyzer_result import MarketDataAnalyzerResult
+from src.kuhl_haus.mdp.models.market_data_cache_keys import MarketDataCacheKeys
+from kuhl_haus.mdp.models.market_data_cache_ttl import MarketDataCacheTTL
+
+
+@pytest.fixture
+def valid_symbol():
+    return "TEST"
+
+
+@pytest.fixture
+def valid_luld_data(valid_symbol: str):
+    return {"event_type": EventType.LimitUpLimitDown.value, "symbol": valid_symbol, "test": "data"}
+
+
+@pytest.fixture
+def valid_equity_agg_data(valid_symbol: str):
+    return {"event_type": EventType.EquityAgg.value, "symbol": valid_symbol, "test": "data"}
+
+
+@pytest.fixture
+def valid_equity_agg_minute_data(valid_symbol: str):
+    return {"event_type": EventType.EquityAggMin.value, "symbol": valid_symbol, "test": "data"}
+
+
+@pytest.fixture
+def valid_equity_trade_data(valid_symbol: str):
+    return {"event_type": EventType.EquityTrade.value, "symbol": valid_symbol, "test": "data"}
+
+
+@pytest.fixture
+def valid_equity_quote_data(valid_symbol: str):
+    return {"event_type": EventType.EquityQuote.value, "symbol": valid_symbol, "test": "data"}
+
+
+def test_analyze_data_with_valid_luld_event_expect_valid_result(valid_symbol, valid_luld_data):
+    # Arrange
+    sut = MassiveDataAnalyzer()
+    symbol = valid_symbol
+    data = valid_luld_data
+
+    # Act
+    result = sut.analyze_data(data)
+
+    # Assert
+    assert len(result) == 1
+    assert result[0].cache_key == f"{MarketDataCacheKeys.HALTS.value}:{symbol}"
+    assert result[0].cache_ttl == MarketDataCacheTTL.THREE_DAYS.value
+    assert result[0].publish_key == f"{MarketDataCacheKeys.HALTS.value}:{symbol}"
+    assert result[0].data == data
+
+
+def test_analyze_data_with_equity_agg_event_happy_path(valid_symbol, valid_equity_agg_data):
+    # Arrange
+    sut = MassiveDataAnalyzer()
+
+    # Act
+    result = sut.analyze_data(data=valid_equity_agg_data)
+
+    # Assert
+    assert len(result) == 1
+    assert result[0].cache_key == f"{MarketDataCacheKeys.AGGREGATE.value}:{valid_symbol}"
+    assert result[0].cache_ttl == MarketDataCacheTTL.THREE_DAYS.value
+    assert result[0].publish_key == f"{MarketDataCacheKeys.AGGREGATE.value}:{valid_symbol}"
+    assert result[0].data == valid_equity_agg_data
+
+
+def test_analyze_data_with_equity_agg_min_event_happy_path(valid_symbol, valid_equity_agg_minute_data):
+    # Arrange
+    sut = MassiveDataAnalyzer()
+
+    # Act
+    result = sut.analyze_data(data=valid_equity_agg_minute_data)
+
+    # Assert
+    assert len(result) == 1
+    assert result[0].cache_key == f"{MarketDataCacheKeys.AGGREGATE.value}:{valid_symbol}"
+    assert result[0].cache_ttl == MarketDataCacheTTL.THREE_DAYS.value
+    assert result[0].publish_key == f"{MarketDataCacheKeys.AGGREGATE.value}:{valid_symbol}"
+    assert result[0].data == valid_equity_agg_minute_data
+
+
+def test_analyze_data_with_equity_trade_event_happy_path(valid_symbol, valid_equity_trade_data):
+    # Arrange
+    sut = MassiveDataAnalyzer()
+
+    # Act
+    result = sut.analyze_data(data=valid_equity_trade_data)
+
+    # Assert
+    assert len(result) == 1
+    assert result[0].cache_key == f"{MarketDataCacheKeys.TRADES.value}:{valid_symbol}"
+    assert result[0].cache_ttl == MarketDataCacheTTL.EIGHT_HOURS.value
+    assert result[0].publish_key == f"{MarketDataCacheKeys.TRADES.value}:{valid_symbol}"
+    assert result[0].data == valid_equity_trade_data
+
+
+def test_analyze_data_equity_quote_event_happy_path(valid_symbol, valid_equity_quote_data):
+    # Arrange
+    sut = MassiveDataAnalyzer()
+
+    # Act
+    result = sut.analyze_data(data=valid_equity_quote_data)
+
+    # Assert
+    assert len(result) == 1
+    assert result[0].cache_key == f"{MarketDataCacheKeys.QUOTES.value}:{valid_symbol}"
+    assert result[0].cache_ttl == MarketDataCacheTTL.THREE_DAYS.value
+    assert result[0].publish_key == f"{MarketDataCacheKeys.QUOTES.value}:{valid_symbol}"
+    assert result[0].data == valid_equity_quote_data
+
+
+def test_analyze_data_with_missing_event_type_expect_unknown_event():
+    # Arrange
+    sut = MassiveDataAnalyzer()
+    sut.handle_unknown_event = MagicMock(return_value=None)
+    data = {"symbol": "TEST", "data": "test"}
+
+    # Act
+    result = sut.analyze_data(data)
+
+    # Assert
+    sut.handle_unknown_event.assert_called_once_with(data)
+    assert result is None
+
+
+def test_analyze_data_with_missing_symbol_expect_unknown_event():
+    # Arrange
+    sut = MassiveDataAnalyzer()
+    sut.handle_unknown_event = MagicMock(return_value=None)
+    data = {"event_type": EventType.LimitUpLimitDown.value, "data": "test"}
+
+    # Act
+    result = sut.analyze_data(data)
+
+    # Assert
+    sut.handle_unknown_event.assert_called_once_with(data)
+    assert result is None
+
+
+def test_analyze_data_with_unsupported_event_expect_unknown_event():
+    # Arrange
+    sut = MassiveDataAnalyzer()
+    sut.handle_unknown_event = MagicMock(return_value=None)
+    data = {"event_type": "UnsupportedEvent", "symbol": "TEST", "test": "data"}
+
+    # Act
+    result = sut.analyze_data(data)
+
+    # Assert
+    sut.handle_unknown_event.assert_called_once_with(data)
+    assert result is None
+
+
+def test_handle_luld_event_happy_path(valid_symbol, valid_luld_data):
+    # Arrange
+    sut = MassiveDataAnalyzer()
+
+    # Act
+    result = sut.handle_luld_event(data=valid_luld_data, symbol=valid_symbol)
+
+    # Assert
+    assert len(result) == 1
+    assert result[0].cache_key == f"{MarketDataCacheKeys.HALTS.value}:{valid_symbol}"
+    assert result[0].cache_ttl == MarketDataCacheTTL.THREE_DAYS.value
+    assert result[0].publish_key == f"{MarketDataCacheKeys.HALTS.value}:{valid_symbol}"
+    assert result[0].data == valid_luld_data
+
+
+def test_handle_equity_agg_event_happy_path(valid_symbol, valid_equity_agg_data):
+    # Arrange
+    sut = MassiveDataAnalyzer()
+
+    # Act
+    result = sut.handle_equity_agg_event(data=valid_equity_agg_data, symbol=valid_symbol)
+
+    # Assert
+    assert len(result) == 1
+    assert result[0].cache_key == f"{MarketDataCacheKeys.AGGREGATE.value}:{valid_symbol}"
+    assert result[0].cache_ttl == MarketDataCacheTTL.THREE_DAYS.value
+    assert result[0].publish_key == f"{MarketDataCacheKeys.AGGREGATE.value}:{valid_symbol}"
+    assert result[0].data == valid_equity_agg_data
+
+
+def test_handle_equity_trade_event_happy_path(valid_symbol, valid_equity_trade_data):
+    # Arrange
+    sut = MassiveDataAnalyzer()
+
+    # Act
+    result = sut.handle_equity_trade_event(data=valid_equity_trade_data, symbol=valid_symbol)
+
+    # Assert
+    assert len(result) == 1
+    assert result[0].cache_key == f"{MarketDataCacheKeys.TRADES.value}:{valid_symbol}"
+    assert result[0].cache_ttl == MarketDataCacheTTL.EIGHT_HOURS.value
+    assert result[0].publish_key == f"{MarketDataCacheKeys.TRADES.value}:{valid_symbol}"
+    assert result[0].data == valid_equity_trade_data
+
+
+def test_handle_equity_quote_event_happy_path(valid_symbol, valid_equity_quote_data):
+    # Arrange
+    sut = MassiveDataAnalyzer()
+
+    # Act
+    result = sut.handle_equity_quote_event(data=valid_equity_quote_data, symbol=valid_symbol)
+
+    # Assert
+    assert len(result) == 1
+    assert result[0].cache_key == f"{MarketDataCacheKeys.QUOTES.value}:{valid_symbol}"
+    assert result[0].cache_ttl == MarketDataCacheTTL.THREE_DAYS.value
+    assert result[0].publish_key == f"{MarketDataCacheKeys.QUOTES.value}:{valid_symbol}"
+    assert result[0].data == valid_equity_quote_data
+
+
+def test_handle_unknown_event_happy_path():
+    # Arrange
+    sut = MassiveDataAnalyzer()
+    data = {"unknown": "event"}
+
+    # Act
+    result = sut.handle_unknown_event(data=data)
+
+    # Assert
+    assert len(result) == 1
+    assert result[0].cache_key.startswith(f"{MarketDataCacheKeys.UNKNOWN.value}:")
+    assert result[0].cache_ttl == MarketDataCacheTTL.ONE_DAY.value
+    assert result[0].publish_key == MarketDataCacheKeys.UNKNOWN.value
+    assert result[0].data == data
--- /dev/null
+++ kuhl_haus_mdp-0.1.3/tests/models/test_top_stocks_cache_item.py
@@ -0,0 +1,109 @@
+# tests/test_top_stocks_cache_item.py
+import unittest
+from collections import defaultdict
+
+from src.kuhl_haus.mdp.models.top_stocks_cache_item import TopStocksCacheItem
+
+
+class TestTopStocksCacheItem(unittest.TestCase):
+    """Unit tests for the TopStocksCacheItem class."""
+
+    def setUp(self):
+        """Set up a TopStocksCacheItem instance for testing."""
+        self.cache_item = TopStocksCacheItem()
+
+    def test_initialization(self):
+        """Test the default initialization of TopStocksCacheItem."""
+        self.assertEqual(self.cache_item.day_start_time, 0.0)
+        self.assertIsInstance(self.cache_item.symbol_data_cache, defaultdict)
+        self.assertIsInstance(self.cache_item.top_volume_map, defaultdict)
+        self.assertIsInstance(self.cache_item.top_gappers_map, defaultdict)
+        self.assertIsInstance(self.cache_item.top_gainers_map, defaultdict)
+
+    def test_to_dict_method(self):
+        """Test the to_dict method of TopStocksCacheItem."""
+        expected_dict = {
+            "day_start_time": 0.0,
+            "symbol_data_cache": self.cache_item.symbol_data_cache,
+            "top_volume_map": self.cache_item.top_volume_map,
+            "top_gappers_map": self.cache_item.top_gappers_map,
+            "top_gainers_map": self.cache_item.top_gainers_map,
+        }
+        self.assertEqual(self.cache_item.to_dict(), expected_dict)
+
+    def test_top_volume_method(self):
+        """Test the top_volume method with a limit."""
+        self.cache_item.top_volume_map = {"AAPL": 1200, "GOOG": 600, "AMZN": 500}
+        self.cache_item.symbol_data_cache = {
+            "AAPL": {"volume": 1000, "accumulated_volume": 1200, "relative_volume": 1.2, "official_open_price": 150,
+                     "vwap": 155, "open": 145, "close": 152, "high": 160, "low": 142, "aggregate_vwap": 156,
+                     "average_size": 50, "avg_volume": 1000, "prev_day_close": 148, "prev_day_volume": 900,
+                     "prev_day_vwap": 154, "change": 4, "pct_change": 2.7, "change_since_open": 7,
+                     "pct_change_since_open": 4.8, "start_timestamp": 100000, "end_timestamp": 110000},
+            "AMZN": {"volume": 300, "accumulated_volume": 500, "relative_volume": 1.2, "official_open_price": 3300,
+                     "vwap": 3500, "open": 3250, "close": 3520, "high": 3600, "low": 3200, "aggregate_vwap": 3450,
+                     "average_size": 85, "avg_volume": 4000, "prev_day_close": 3200, "prev_day_volume": 3900,
+                     "prev_day_vwap": 3400, "change": 200, "pct_change": 10.0, "change_since_open": 270,
+                     "pct_change_since_open": 8.3, "start_timestamp": 300000, "end_timestamp": 310000},
+            "GOOG": {"volume": 500, "accumulated_volume": 600, "relative_volume": 1.0, "official_open_price": 2500,
+                     "vwap": 2550, "open": 2450, "close": 2520, "high": 2600, "low": 2400, "aggregate_vwap": 2560,
+                     "average_size": 65, "avg_volume": 2000, "prev_day_close": 2510, "prev_day_volume": 1900,
+                     "prev_day_vwap": 2530, "change": 10, "pct_change": 0.4, "change_since_open": 70,
+                     "pct_change_since_open": 2.9, "start_timestamp": 200000, "end_timestamp": 210000},
+        }
+        result = self.cache_item.top_volume(2)
+        self.assertEqual(2, len(result))
+        self.assertEqual(result[0]["symbol"], "AAPL")
+        self.assertEqual(result[1]["symbol"], "GOOG")
+
+    def test_top_gappers_method(self):
+        """Test the top_gappers method with a limit."""
+        self.cache_item.top_gappers_map = {"AAPL": 5.0, "GOOG": -3.0, "AMZN": 10.0}
+        self.cache_item.symbol_data_cache = {
+            "AAPL": {"volume": 1000, "accumulated_volume": 1200, "relative_volume": 1.5, "official_open_price": 150,
+                     "vwap": 155, "open": 145, "close": 152, "high": 160, "low": 142, "aggregate_vwap": 156,
+                     "average_size": 50, "avg_volume": 1000, "prev_day_close": 148, "prev_day_volume": 900,
+                     "prev_day_vwap": 154, "change": 4, "pct_change": 5.0, "change_since_open": 7,
+                     "pct_change_since_open": 2.5, "start_timestamp": 100000, "end_timestamp": 110000},
+            "AMZN": {"volume": 300, "accumulated_volume": 500, "relative_volume": 1.2, "official_open_price": 3300,
+                     "vwap": 3500, "open": 3250, "close": 3520, "high": 3600, "low": 3200, "aggregate_vwap": 3450,
+                     "average_size": 85, "avg_volume": 4000, "prev_day_close": 3200, "prev_day_volume": 3900,
+                     "prev_day_vwap": 3400, "change": 200, "pct_change": 10.0, "change_since_open": 270,
+                     "pct_change_since_open": 8.3, "start_timestamp": 300000, "end_timestamp": 310000},
+            "GOOG": {"volume": 500, "accumulated_volume": 600, "relative_volume": 1.0, "official_open_price": 2500,
+                     "vwap": 2550, "open": 2450, "close": 2520, "high": 2600, "low": 2400, "aggregate_vwap": 2560,
+                     "average_size": 65, "avg_volume": 2000, "prev_day_close": 2510, "prev_day_volume": 1900,
+                     "prev_day_vwap": 2530, "change": 10, "pct_change": 0.4, "change_since_open": 70,
+                     "pct_change_since_open": 2.9, "start_timestamp": 200000, "end_timestamp": 210000},
+        }
+        result = self.cache_item.top_gappers(1)
+        self.assertEqual(len(result), 1)
+        self.assertEqual(result[0]["symbol"], "AMZN")
+
+    def test_top_gainers_method(self):
+        """Test the top_gainers method with a limit."""
+        self.cache_item.top_gainers_map = {"AAPL": 2.5, "GOOG": -0.5, "AMZN": 8.3}
+        self.cache_item.symbol_data_cache = {
+            "AAPL": {"volume": 500, "accumulated_volume": 800, "relative_volume": 1.6, "official_open_price": 140,
+                     "vwap": 145, "open": 130, "close": 150, "high": 155, "low": 128, "aggregate_vwap": 150,
+                     "average_size": 45, "avg_volume": 900, "prev_day_close": 142, "prev_day_volume": 850,
+                     "prev_day_vwap": 145, "change": 8, "pct_change": 2.5, "change_since_open": 20,
+                     "pct_change_since_open": 15.4, "start_timestamp": 150000, "end_timestamp": 160000},
+            "AMZN": {"volume": 800, "accumulated_volume": 1200, "relative_volume": 1.4, "official_open_price": 3200,
+                     "vwap": 3300, "open": 3150, "close": 3400, "high": 3450, "low": 3100, "aggregate_vwap": 3350,
+                     "average_size": 70, "avg_volume": 3900, "prev_day_close": 3250, "prev_day_volume": 3800,
+                     "prev_day_vwap": 3300, "change": 150, "pct_change": 4.6, "change_since_open": 250,
+                     "pct_change_since_open": 8.3, "start_timestamp": 170000, "end_timestamp": 180000},
+            "GOOG": {"volume": 500, "accumulated_volume": 600, "relative_volume": 1.0, "official_open_price": 2500,
+                     "vwap": 2550, "open": 2450, "close": 2520, "high": 2600, "low": 2400, "aggregate_vwap": 2560,
+                     "average_size": 65, "avg_volume": 2000, "prev_day_close": 2510, "prev_day_volume": 1900,
+                     "prev_day_vwap": 2530, "change": 10, "pct_change": 0.4, "change_since_open": 70,
+                     "pct_change_since_open": 2.9, "start_timestamp": 200000, "end_timestamp": 210000},
+        }
+        result = self.cache_item.top_gainers(1)
+        self.assertEqual(len(result), 1)
+        self.assertEqual(result[0]["symbol"], "AMZN")
+
+
+if __name__ == "__main__":
+    unittest.main()
--- kuhl_haus_mdp-0.1.1/src/kuhl_haus/mdp/analyzers/massive_data_analyzer.py
+++ /dev/null
@@ -1,102 +0,0 @@
-import logging
-from time import time
-from typing import List, Optional
-from massive.websocket.models import EventType
-
-from kuhl_haus.mdp.models.market_data_analyzer_result import MarketDataAnalyzerResult
-from kuhl_haus.mdp.models.market_data_cache_keys import MarketDataCacheKeys
-
-
-class MassiveDataAnalyzer:
-    def __init__(self):
-        self.logger = logging.getLogger(__name__)
-        self.event_handlers = {
-            EventType.LimitUpLimitDown.value: self.handle_luld_event,
-            EventType.EquityAgg.value: self.handle_equity_agg_event,
-            EventType.EquityAggMin.value: self.handle_equity_agg_event,
-            EventType.EquityTrade.value: self.handle_equity_trade_event,
-            EventType.EquityQuote.value: self.handle_equity_quote_event,
-        }
-
-    async def analyze_data(self, data: dict) -> Optional[List[MarketDataAnalyzerResult]]:
-        """
-        Process raw market data message
-
-        Args:
-            data: serialized message from Massive/Polygon.io
-
-        Returns:
-            Processed result dict or None if message should be discarded
-        """
-        if "event_type" not in data:
-            self.logger.info("Message missing 'event_type'")
-            return await self.handle_unknown_event(data)
-        event_type = data.get("event_type")
-
-        if "symbol" not in data:
-            self.logger.info("Message missing 'symbol'")
-            return await self.handle_unknown_event(data)
-        symbol = data.get("symbol")
-
-        if event_type in self.event_handlers:
-            return await self.event_handlers[event_type](**{"data": data, "symbol": symbol})
-        else:
-            self.logger.warning(f"Unsupported message type: {event_type}")
-            return await self.handle_unknown_event(data)
-
-    async def handle_luld_event(self, data: dict, symbol: str) -> Optional[List[MarketDataAnalyzerResult]]:
-        try:
-            return [MarketDataAnalyzerResult(
-                data=data,
-                cache_key=f"{MarketDataCacheKeys.HALTS.value}:{symbol}",
-                cache_ttl=28500, # 7 hours, 55 minutes
-                publish_key=f"{MarketDataCacheKeys.HALTS.value}:{symbol}",
-            )]
-        except Exception as e:
-            self.logger.error(f"Error processing LULD message for {symbol}: {data}", e)
-
-    async def handle_equity_agg_event(self, data: dict, symbol: str) -> Optional[List[MarketDataAnalyzerResult]]:
-        try:
-            return [MarketDataAnalyzerResult(
-                data=data,
-                # cache_key=f"{MarketDataCacheKeys.AGGREGATE.value}:{symbol}",
-                # cache_ttl=259200, # 3 days
-                publish_key=f"{MarketDataCacheKeys.AGGREGATE.value}:{symbol}",
-            )]
-        except Exception as e:
-            self.logger.error(f"Error processing EquityAgg message for {symbol}: {data}", e)
-
-    async def handle_equity_trade_event(self, data: dict, symbol: str) -> Optional[List[MarketDataAnalyzerResult]]:
-        try:
-            return [MarketDataAnalyzerResult(
-                data=data,
-                # cache_key=f"{MarketDataCacheKeys.TRADES.value}:{symbol}",
-                # cache_ttl=28500, # 7 hours, 55 minutes
-                publish_key=f"{MarketDataCacheKeys.TRADES.value}:{symbol}",
-            )]
-        except Exception as e:
-            self.logger.error(f"Error processing EquityTrade message for {symbol}: {data}", e)
-
-    async def handle_equity_quote_event(self, data: dict, symbol: str) -> Optional[List[MarketDataAnalyzerResult]]:
-        try:
-            return [MarketDataAnalyzerResult(
-                data=data,
-                # cache_key=f"{MarketDataCacheKeys.QUOTES.value}:{symbol}",
-                # cache_ttl=259200, # 3 days
-                publish_key=f"{MarketDataCacheKeys.QUOTES.value}:{symbol}",
-            )]
-        except Exception as e:
-            self.logger.error(f"Error processing EquityQuote message for {symbol}: {data}", e)
-
-    async def handle_unknown_event(self, data: dict) -> Optional[List[MarketDataAnalyzerResult]]:
-        try:
-            timestamp = f"{time()}".replace('.','')
-            cache_key = f"{MarketDataCacheKeys.UNKNOWN.value}:{timestamp}"
-            return [MarketDataAnalyzerResult(
-                data=data,
-                cache_key=cache_key,
-                cache_ttl=86400, # 1 days
-                publish_key=f"{MarketDataCacheKeys.UNKNOWN.value}",
-            )]
-        except Exception as e:
-            self.logger.error(f"Error processing unknown message type: {data}", e)