kuhl-haus-mdp 0.1.11__tar.gz → 0.1.12__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (48)
  1. {kuhl_haus_mdp-0.1.11 → kuhl_haus_mdp-0.1.12}/PKG-INFO +1 -1
  2. {kuhl_haus_mdp-0.1.11 → kuhl_haus_mdp-0.1.12}/pyproject.toml +1 -1
  3. {kuhl_haus_mdp-0.1.11 → kuhl_haus_mdp-0.1.12}/src/kuhl_haus/mdp/analyzers/analyzer.py +1 -1
  4. kuhl_haus_mdp-0.1.12/src/kuhl_haus/mdp/analyzers/massive_data_analyzer.py +117 -0
  5. {kuhl_haus_mdp-0.1.11 → kuhl_haus_mdp-0.1.12}/src/kuhl_haus/mdp/analyzers/top_stocks.py +6 -6
  6. {kuhl_haus_mdp-0.1.11 → kuhl_haus_mdp-0.1.12}/src/kuhl_haus/mdp/components/market_data_cache.py +13 -13
  7. {kuhl_haus_mdp-0.1.11 → kuhl_haus_mdp-0.1.12}/src/kuhl_haus/mdp/components/market_data_scanner.py +1 -1
  8. {kuhl_haus_mdp-0.1.11/src/kuhl_haus/mdp/integ → kuhl_haus_mdp-0.1.12/src/kuhl_haus/mdp/components}/massive_data_processor.py +2 -2
  9. {kuhl_haus_mdp-0.1.11/src/kuhl_haus/mdp/integ → kuhl_haus_mdp-0.1.12/src/kuhl_haus/mdp/components}/massive_data_queues.py +2 -2
  10. {kuhl_haus_mdp-0.1.11/src/kuhl_haus/mdp/models → kuhl_haus_mdp-0.1.12/src/kuhl_haus/mdp/enum}/market_data_cache_keys.py +1 -1
  11. {kuhl_haus_mdp-0.1.11/src/kuhl_haus/mdp/models → kuhl_haus_mdp-0.1.12/src/kuhl_haus/mdp/enum}/market_data_cache_ttl.py +1 -1
  12. {kuhl_haus_mdp-0.1.11/src/kuhl_haus/mdp/models → kuhl_haus_mdp-0.1.12/src/kuhl_haus/mdp/enum}/market_data_pubsub_keys.py +1 -1
  13. {kuhl_haus_mdp-0.1.11 → kuhl_haus_mdp-0.1.12}/src/kuhl_haus/mdp/helpers/queue_name_resolver.py +1 -1
  14. {kuhl_haus_mdp-0.1.11 → kuhl_haus_mdp-0.1.12}/tests/analyzers/test_massive_data_analyzer.py +58 -13
  15. {kuhl_haus_mdp-0.1.11 → kuhl_haus_mdp-0.1.12}/tests/analyzers/test_top_stocks_rehydrate.py +4 -4
  16. {kuhl_haus_mdp-0.1.11 → kuhl_haus_mdp-0.1.12}/tests/components/test_market_data_cache.py +7 -7
  17. {kuhl_haus_mdp-0.1.11/tests/models → kuhl_haus_mdp-0.1.12/tests/data}/test_top_stocks_cache_item.py +1 -1
  18. {kuhl_haus_mdp-0.1.11 → kuhl_haus_mdp-0.1.12}/tests/helpers/test_queue_name_resolver.py +1 -1
  19. {kuhl_haus_mdp-0.1.11/tests/integ → kuhl_haus_mdp-0.1.12/tests/helpers}/test_web_socket_message_serde.py +1 -1
  20. kuhl_haus_mdp-0.1.11/src/kuhl_haus/mdp/analyzers/massive_data_analyzer.py +0 -93
  21. kuhl_haus_mdp-0.1.11/tests/models/__init__.py +0 -0
  22. {kuhl_haus_mdp-0.1.11 → kuhl_haus_mdp-0.1.12}/LICENSE.txt +0 -0
  23. {kuhl_haus_mdp-0.1.11 → kuhl_haus_mdp-0.1.12}/README.md +0 -0
  24. {kuhl_haus_mdp-0.1.11 → kuhl_haus_mdp-0.1.12}/src/kuhl_haus/mdp/__init__.py +0 -0
  25. {kuhl_haus_mdp-0.1.11 → kuhl_haus_mdp-0.1.12}/src/kuhl_haus/mdp/analyzers/__init__.py +0 -0
  26. {kuhl_haus_mdp-0.1.11 → kuhl_haus_mdp-0.1.12}/src/kuhl_haus/mdp/components/__init__.py +0 -0
  27. {kuhl_haus_mdp-0.1.11/src/kuhl_haus/mdp/integ → kuhl_haus_mdp-0.1.12/src/kuhl_haus/mdp/components}/massive_data_listener.py +0 -0
  28. {kuhl_haus_mdp-0.1.11 → kuhl_haus_mdp-0.1.12}/src/kuhl_haus/mdp/components/widget_data_service.py +0 -0
  29. {kuhl_haus_mdp-0.1.11/src/kuhl_haus/mdp/helpers → kuhl_haus_mdp-0.1.12/src/kuhl_haus/mdp/data}/__init__.py +0 -0
  30. {kuhl_haus_mdp-0.1.11/src/kuhl_haus/mdp/models → kuhl_haus_mdp-0.1.12/src/kuhl_haus/mdp/data}/market_data_analyzer_result.py +0 -0
  31. {kuhl_haus_mdp-0.1.11/src/kuhl_haus/mdp/models → kuhl_haus_mdp-0.1.12/src/kuhl_haus/mdp/data}/top_stocks_cache_item.py +0 -0
  32. {kuhl_haus_mdp-0.1.11/src/kuhl_haus/mdp/integ → kuhl_haus_mdp-0.1.12/src/kuhl_haus/mdp/enum}/__init__.py +0 -0
  33. {kuhl_haus_mdp-0.1.11/src/kuhl_haus/mdp/models → kuhl_haus_mdp-0.1.12/src/kuhl_haus/mdp/enum}/constants.py +0 -0
  34. {kuhl_haus_mdp-0.1.11/src/kuhl_haus/mdp/models → kuhl_haus_mdp-0.1.12/src/kuhl_haus/mdp/enum}/market_data_scanner_names.py +0 -0
  35. {kuhl_haus_mdp-0.1.11/src/kuhl_haus/mdp/models → kuhl_haus_mdp-0.1.12/src/kuhl_haus/mdp/enum}/massive_data_queue.py +0 -0
  36. {kuhl_haus_mdp-0.1.11/src/kuhl_haus/mdp/models → kuhl_haus_mdp-0.1.12/src/kuhl_haus/mdp/helpers}/__init__.py +0 -0
  37. {kuhl_haus_mdp-0.1.11 → kuhl_haus_mdp-0.1.12}/src/kuhl_haus/mdp/helpers/process_manager.py +0 -0
  38. {kuhl_haus_mdp-0.1.11 → kuhl_haus_mdp-0.1.12}/src/kuhl_haus/mdp/helpers/utils.py +0 -0
  39. {kuhl_haus_mdp-0.1.11/src/kuhl_haus/mdp/integ → kuhl_haus_mdp-0.1.12/src/kuhl_haus/mdp/helpers}/web_socket_message_serde.py +0 -0
  40. {kuhl_haus_mdp-0.1.11 → kuhl_haus_mdp-0.1.12}/tests/__init__.py +0 -0
  41. {kuhl_haus_mdp-0.1.11 → kuhl_haus_mdp-0.1.12}/tests/analyzers/__init__.py +0 -0
  42. {kuhl_haus_mdp-0.1.11 → kuhl_haus_mdp-0.1.12}/tests/components/__init__.py +0 -0
  43. {kuhl_haus_mdp-0.1.11 → kuhl_haus_mdp-0.1.12}/tests/components/test_market_data_scanner.py +0 -0
  44. {kuhl_haus_mdp-0.1.11 → kuhl_haus_mdp-0.1.12}/tests/components/test_widget_data_service.py +0 -0
  45. {kuhl_haus_mdp-0.1.11/tests/helpers → kuhl_haus_mdp-0.1.12/tests/data}/__init__.py +0 -0
  46. {kuhl_haus_mdp-0.1.11/tests/integ → kuhl_haus_mdp-0.1.12/tests/helpers}/__init__.py +0 -0
  47. {kuhl_haus_mdp-0.1.11 → kuhl_haus_mdp-0.1.12}/tests/helpers/test_process_manager.py +0 -0
  48. {kuhl_haus_mdp-0.1.11 → kuhl_haus_mdp-0.1.12}/tests/helpers/test_utils.py +0 -0
{kuhl_haus_mdp-0.1.11 → kuhl_haus_mdp-0.1.12}/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: kuhl-haus-mdp
- Version: 0.1.11
+ Version: 0.1.12
  Summary: Market data processing pipeline for stock market scanner
  Author-Email: Tom Pounders <git@oldschool.engineer>
  License: The MIT License (MIT)
{kuhl_haus_mdp-0.1.11 → kuhl_haus_mdp-0.1.12}/pyproject.toml
@@ -30,7 +30,7 @@ dependencies = [
      "uvicorn[standard]",
      "websockets",
  ]
- version = "0.1.11"
+ version = "0.1.12"

  [project.license]
  file = "LICENSE.txt"
{kuhl_haus_mdp-0.1.11 → kuhl_haus_mdp-0.1.12}/src/kuhl_haus/mdp/analyzers/analyzer.py
@@ -1,5 +1,5 @@
  from typing import Optional, List
- from kuhl_haus.mdp.models.market_data_analyzer_result import MarketDataAnalyzerResult
+ from kuhl_haus.mdp.data.market_data_analyzer_result import MarketDataAnalyzerResult
  from kuhl_haus.mdp.components.market_data_cache import MarketDataCache

kuhl_haus_mdp-0.1.12/src/kuhl_haus/mdp/analyzers/massive_data_analyzer.py (new file)
@@ -0,0 +1,117 @@
+ import logging
+ from time import time
+ from typing import List, Optional
+ from massive.websocket.models import EventType
+
+ from kuhl_haus.mdp.data.market_data_analyzer_result import MarketDataAnalyzerResult
+ from kuhl_haus.mdp.enum.market_data_cache_keys import MarketDataCacheKeys
+ from kuhl_haus.mdp.enum.market_data_cache_ttl import MarketDataCacheTTL
+
+
+ class MassiveDataAnalyzer:
+     cache_agg_event: bool
+     cache_trade_event: bool
+     cache_quote_event: bool
+
+     def __init__(self, cache_agg_event: bool = False, cache_trade_event: bool = False, cache_quote_event: bool = False):
+         self.logger = logging.getLogger(__name__)
+         self.cache_agg_event = cache_agg_event
+         self.cache_trade_event = cache_trade_event
+         self.cache_quote_event = cache_quote_event
+         self.event_handlers = {
+             EventType.LimitUpLimitDown.value: self.handle_luld_event,
+             EventType.EquityAgg.value: self.handle_equity_agg_event,
+             EventType.EquityAggMin.value: self.handle_equity_agg_event,
+             EventType.EquityTrade.value: self.handle_equity_trade_event,
+             EventType.EquityQuote.value: self.handle_equity_quote_event,
+         }
+
+     def analyze_data(self, data: dict) -> Optional[List[MarketDataAnalyzerResult]]:
+         """
+         Process raw market data message
+
+         Args:
+             data: serialized message from Massive/Polygon.io
+
+         Returns:
+             Processed result dict or None if message should be discarded
+         """
+         if "event_type" not in data:
+             self.logger.info("Message missing 'event_type'")
+             return self.handle_unknown_event(data)
+         event_type = data.get("event_type")
+
+         if "symbol" not in data:
+             self.logger.info("Message missing 'symbol'")
+             return self.handle_unknown_event(data)
+         symbol = data.get("symbol")
+
+         if event_type in self.event_handlers:
+             return self.event_handlers[event_type](**{"data": data, "symbol": symbol})
+         else:
+             self.logger.warning(f"Unsupported message type: {event_type}")
+             return self.handle_unknown_event(data)
+
+     @staticmethod
+     def handle_luld_event(data: dict, symbol: str) -> Optional[List[MarketDataAnalyzerResult]]:
+         return [MarketDataAnalyzerResult(
+             data=data,
+             cache_key=f"{MarketDataCacheKeys.HALTS.value}:{symbol}",
+             cache_ttl=MarketDataCacheTTL.HALTS.value,
+             publish_key=f"{MarketDataCacheKeys.HALTS.value}:{symbol}",
+         )]
+
+     def handle_equity_agg_event(self, data: dict, symbol: str) -> Optional[List[MarketDataAnalyzerResult]]:
+         if self.cache_agg_event:
+             return [MarketDataAnalyzerResult(
+                 data=data,
+                 cache_key=f"{MarketDataCacheKeys.AGGREGATE.value}:{symbol}",
+                 cache_ttl=MarketDataCacheTTL.AGGREGATE.value,
+                 publish_key=f"{MarketDataCacheKeys.AGGREGATE.value}:{symbol}",
+             )]
+         else:
+             return [MarketDataAnalyzerResult(
+                 data=data,
+                 publish_key=f"{MarketDataCacheKeys.AGGREGATE.value}:{symbol}",
+             )]
+
+     def handle_equity_trade_event(self, data: dict, symbol: str) -> Optional[List[MarketDataAnalyzerResult]]:
+         if self.cache_trade_event:
+             return [MarketDataAnalyzerResult(
+                 data=data,
+                 cache_key=f"{MarketDataCacheKeys.TRADES.value}:{symbol}",
+                 cache_ttl=MarketDataCacheTTL.TRADES.value,
+                 publish_key=f"{MarketDataCacheKeys.TRADES.value}:{symbol}",
+             )]
+         else:
+             return [MarketDataAnalyzerResult(
+                 data=data,
+                 publish_key=f"{MarketDataCacheKeys.TRADES.value}:{symbol}",
+             )]
+
+     def handle_equity_quote_event(self, data: dict, symbol: str) -> Optional[List[MarketDataAnalyzerResult]]:
+         if self.cache_quote_event:
+             return [MarketDataAnalyzerResult(
+                 data=data,
+                 cache_key=f"{MarketDataCacheKeys.QUOTES.value}:{symbol}",
+                 cache_ttl=MarketDataCacheTTL.QUOTES.value,
+                 publish_key=f"{MarketDataCacheKeys.QUOTES.value}:{symbol}",
+             )]
+         else:
+             return [MarketDataAnalyzerResult(
+                 data=data,
+                 # cache_key=f"{MarketDataCacheKeys.QUOTES.value}:{symbol}",
+                 # cache_ttl=MarketDataCacheTTL.QUOTES.value,
+                 publish_key=f"{MarketDataCacheKeys.QUOTES.value}:{symbol}",
+             )]
+
+     @staticmethod
+     def handle_unknown_event(data: dict) -> Optional[List[MarketDataAnalyzerResult]]:
+         timestamp = f"{time()}".replace('.','')
+         cache_key = f"{MarketDataCacheKeys.UNKNOWN.value}:{timestamp}"
+         return [MarketDataAnalyzerResult(
+             data=data,
+             cache_key=cache_key,
+             cache_ttl=MarketDataCacheTTL.UNKNOWN.value,
+             publish_key=f"{MarketDataCacheKeys.UNKNOWN.value}",
+         )]
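The behavioral change in the rewritten analyzer is that caching for aggregate, trade, and quote events is now opt-in via constructor flags, while LULD (halt) and unknown events are always cached. A minimal usage sketch under only the assumptions visible in this diff; the payload fields beyond event_type and symbol are illustrative:

from massive.websocket.models import EventType

from kuhl_haus.mdp.analyzers.massive_data_analyzer import MassiveDataAnalyzer

# Cache aggregate bars; trades and quotes stay publish-only (the default).
analyzer = MassiveDataAnalyzer(cache_agg_event=True)

# Hypothetical deserialized websocket message from Massive/Polygon.io.
event = {"event_type": EventType.EquityAgg.value, "symbol": "AAPL", "close": 189.5}

results = analyzer.analyze_data(event)
for result in results:
    # With cache_agg_event=True the result carries a cache_key built from
    # MarketDataCacheKeys.AGGREGATE plus the AGGREGATE TTL; with the default
    # flags cache_key is None and cache_ttl is 0, i.e. publish-only.
    print(result.cache_key, result.cache_ttl, result.publish_key)

Messages missing event_type or symbol, or carrying an unrecognized event type, fall through to handle_unknown_event, which caches the raw payload under a timestamped UNKNOWN key and publishes it on the UNKNOWN channel.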
{kuhl_haus_mdp-0.1.11 → kuhl_haus_mdp-0.1.12}/src/kuhl_haus/mdp/analyzers/top_stocks.py
@@ -11,11 +11,11 @@ from massive.websocket.models import (

  from kuhl_haus.mdp.analyzers.analyzer import Analyzer
  from kuhl_haus.mdp.components.market_data_cache import MarketDataCache
- from kuhl_haus.mdp.models.market_data_analyzer_result import MarketDataAnalyzerResult
- from kuhl_haus.mdp.models.market_data_cache_keys import MarketDataCacheKeys
- from kuhl_haus.mdp.models.market_data_cache_ttl import MarketDataCacheTTL
- from kuhl_haus.mdp.models.market_data_pubsub_keys import MarketDataPubSubKeys
- from kuhl_haus.mdp.models.top_stocks_cache_item import TopStocksCacheItem
+ from kuhl_haus.mdp.data.market_data_analyzer_result import MarketDataAnalyzerResult
+ from kuhl_haus.mdp.data.top_stocks_cache_item import TopStocksCacheItem
+ from kuhl_haus.mdp.enum.market_data_cache_keys import MarketDataCacheKeys
+ from kuhl_haus.mdp.enum.market_data_cache_ttl import MarketDataCacheTTL
+ from kuhl_haus.mdp.enum.market_data_pubsub_keys import MarketDataPubSubKeys


  class TopStocksAnalyzer(Analyzer):
@@ -41,7 +41,7 @@ class TopStocksAnalyzer(Analyzer):
              self.cache_item = TopStocksCacheItem()
              self.logger.info(f"Outside market hours ({et_now.strftime('%H:%M:%S %Z')}), clearing cache.")
              return
-         data = await self.cache.get_cache(self.cache_key)
+         data = await self.cache.read(self.cache_key)
          if not data:
              self.cache_item = TopStocksCacheItem()
              self.logger.info("No data to rehydrate TopStocksCacheItem.")
{kuhl_haus_mdp-0.1.11 → kuhl_haus_mdp-0.1.12}/src/kuhl_haus/mdp/components/market_data_cache.py
@@ -14,8 +14,8 @@ from massive.rest.models import (
  )

  from kuhl_haus.mdp.helpers.utils import ticker_snapshot_to_dict
- from kuhl_haus.mdp.models.market_data_cache_keys import MarketDataCacheKeys
- from kuhl_haus.mdp.models.market_data_cache_ttl import MarketDataCacheTTL
+ from kuhl_haus.mdp.enum.market_data_cache_keys import MarketDataCacheKeys
+ from kuhl_haus.mdp.enum.market_data_cache_ttl import MarketDataCacheTTL


  class MarketDataCache:
@@ -26,7 +26,7 @@ class MarketDataCache:
          self.redis_client = redis_client
          self.http_session = None

-     async def delete_cache(self, cache_key: str):
+     async def delete(self, cache_key: str):
          """
          Delete cache entry.

@@ -38,21 +38,21 @@
          except Exception as e:
              self.logger.error(f"Error deleting cache entry: {e}")

-     async def get_cache(self, cache_key: str) -> Optional[dict]:
+     async def read(self, cache_key: str) -> Optional[dict]:
          """Fetch current value from Redis cache (for snapshot requests)."""
          value = await self.redis_client.get(cache_key)
          if value:
              return json.loads(value)
          return None

-     async def cache_data(self, data: Any, cache_key: str, cache_ttl: int = 0):
+     async def write(self, data: Any, cache_key: str, cache_ttl: int = 0):
          if cache_ttl > 0:
              await self.redis_client.setex(cache_key, cache_ttl, json.dumps(data))
          else:
              await self.redis_client.set(cache_key, json.dumps(data))
          self.logger.info(f"Cached data for {cache_key}")

-     async def publish_data(self, data: Any, publish_key: str = None):
+     async def broadcast(self, data: Any, publish_key: str = None):
          await self.redis_client.publish(publish_key, json.dumps(data))
          self.logger.info(f"Published data for {publish_key}")

@@ -64,12 +64,12 @@
          :return: None
          """
          cache_key = f"{MarketDataCacheKeys.TICKER_SNAPSHOTS.value}:{ticker}"
-         await self.delete_cache(cache_key=cache_key)
+         await self.delete(cache_key=cache_key)

      async def get_ticker_snapshot(self, ticker: str) -> TickerSnapshot:
          self.logger.info(f"Getting snapshot for {ticker}")
          cache_key = f"{MarketDataCacheKeys.TICKER_SNAPSHOTS.value}:{ticker}"
-         result = await self.get_cache(cache_key=cache_key)
+         result = await self.read(cache_key=cache_key)
          if result:
              self.logger.info(f"Returning cached snapshot for {ticker}")
              snapshot = TickerSnapshot(**result)
@@ -80,7 +80,7 @@
          )
          self.logger.info(f"Snapshot result: {snapshot}")
          data = ticker_snapshot_to_dict(snapshot)
-         await self.cache_data(
+         await self.write(
              data=data,
              cache_key=cache_key,
              cache_ttl=MarketDataCacheTTL.TICKER_SNAPSHOTS.value
@@ -90,7 +90,7 @@
      async def get_avg_volume(self, ticker: str):
          self.logger.info(f"Getting average volume for {ticker}")
          cache_key = f"{MarketDataCacheKeys.TICKER_AVG_VOLUME.value}:{ticker}"
-         avg_volume = await self.get_cache(cache_key=cache_key)
+         avg_volume = await self.read(cache_key=cache_key)
          if avg_volume:
              self.logger.info(f"Returning cached value for {ticker}: {avg_volume}")
              return avg_volume
@@ -136,7 +136,7 @@
          avg_volume = total_volume / periods_calculated

          self.logger.info(f"average volume {ticker}: {avg_volume}")
-         await self.cache_data(
+         await self.write(
              data=avg_volume,
              cache_key=cache_key,
              cache_ttl=MarketDataCacheTTL.TICKER_AVG_VOLUME.value
@@ -146,7 +146,7 @@
      async def get_free_float(self, ticker: str):
          self.logger.info(f"Getting free float for {ticker}")
          cache_key = f"{MarketDataCacheKeys.TICKER_FREE_FLOAT.value}:{ticker}"
-         free_float = await self.get_cache(cache_key=cache_key)
+         free_float = await self.read(cache_key=cache_key)
          if free_float:
              self.logger.info(f"Returning cached value for {ticker}: {free_float}")
              return free_float
@@ -183,7 +183,7 @@
              raise

          self.logger.info(f"free float {ticker}: {free_float}")
-         await self.cache_data(
+         await self.write(
              data=free_float,
              cache_key=cache_key,
              cache_ttl=MarketDataCacheTTL.TICKER_FREE_FLOAT.value
{kuhl_haus_mdp-0.1.11 → kuhl_haus_mdp-0.1.12}/src/kuhl_haus/mdp/components/market_data_scanner.py
@@ -9,7 +9,7 @@ from redis.exceptions import ConnectionError
  from massive.rest import RESTClient

  from kuhl_haus.mdp.analyzers.analyzer import Analyzer
- from kuhl_haus.mdp.models.market_data_analyzer_result import MarketDataAnalyzerResult
+ from kuhl_haus.mdp.data.market_data_analyzer_result import MarketDataAnalyzerResult
  from kuhl_haus.mdp.components.market_data_cache import MarketDataCache

{kuhl_haus_mdp-0.1.11/src/kuhl_haus/mdp/integ → kuhl_haus_mdp-0.1.12/src/kuhl_haus/mdp/components}/massive_data_processor.py
@@ -7,8 +7,8 @@ import redis.asyncio as aioredis
  from aio_pika.abc import AbstractIncomingMessage

  from kuhl_haus.mdp.analyzers.massive_data_analyzer import MassiveDataAnalyzer
- from kuhl_haus.mdp.integ.web_socket_message_serde import WebSocketMessageSerde
- from kuhl_haus.mdp.models.market_data_analyzer_result import MarketDataAnalyzerResult
+ from kuhl_haus.mdp.helpers.web_socket_message_serde import WebSocketMessageSerde
+ from kuhl_haus.mdp.data.market_data_analyzer_result import MarketDataAnalyzerResult


  class MassiveDataProcessor:
{kuhl_haus_mdp-0.1.11/src/kuhl_haus/mdp/integ → kuhl_haus_mdp-0.1.12/src/kuhl_haus/mdp/components}/massive_data_queues.py
@@ -6,9 +6,9 @@ from aio_pika import DeliveryMode
  from aio_pika.abc import AbstractConnection, AbstractChannel
  from massive.websocket.models import WebSocketMessage

- from kuhl_haus.mdp.models.massive_data_queue import MassiveDataQueue
+ from kuhl_haus.mdp.enum.massive_data_queue import MassiveDataQueue
  from kuhl_haus.mdp.helpers.queue_name_resolver import QueueNameResolver
- from kuhl_haus.mdp.integ.web_socket_message_serde import WebSocketMessageSerde
+ from kuhl_haus.mdp.helpers.web_socket_message_serde import WebSocketMessageSerde


  class MassiveDataQueues:
{kuhl_haus_mdp-0.1.11/src/kuhl_haus/mdp/models → kuhl_haus_mdp-0.1.12/src/kuhl_haus/mdp/enum}/market_data_cache_keys.py
@@ -1,6 +1,6 @@
  from enum import Enum

- from kuhl_haus.mdp.models.market_data_scanner_names import MarketDataScannerNames
+ from kuhl_haus.mdp.enum.market_data_scanner_names import MarketDataScannerNames


  class MarketDataCacheKeys(Enum):
{kuhl_haus_mdp-0.1.11/src/kuhl_haus/mdp/models → kuhl_haus_mdp-0.1.12/src/kuhl_haus/mdp/enum}/market_data_cache_ttl.py
@@ -1,5 +1,5 @@
  from enum import Enum
- from kuhl_haus.mdp.models.constants import (
+ from kuhl_haus.mdp.enum.constants import (
      EIGHT_HOURS,
      FIVE_MINUTES,
      ONE_DAY,
{kuhl_haus_mdp-0.1.11/src/kuhl_haus/mdp/models → kuhl_haus_mdp-0.1.12/src/kuhl_haus/mdp/enum}/market_data_pubsub_keys.py
@@ -1,6 +1,6 @@
  from enum import Enum

- from kuhl_haus.mdp.models.market_data_scanner_names import MarketDataScannerNames
+ from kuhl_haus.mdp.enum.market_data_scanner_names import MarketDataScannerNames


  class MarketDataPubSubKeys(Enum):
{kuhl_haus_mdp-0.1.11 → kuhl_haus_mdp-0.1.12}/src/kuhl_haus/mdp/helpers/queue_name_resolver.py
@@ -6,7 +6,7 @@ from massive.websocket.models import (
      LimitUpLimitDown,
  )

- from kuhl_haus.mdp.models.massive_data_queue import MassiveDataQueue
+ from kuhl_haus.mdp.enum.massive_data_queue import MassiveDataQueue


  class QueueNameResolver:
{kuhl_haus_mdp-0.1.11 → kuhl_haus_mdp-0.1.12}/tests/analyzers/test_massive_data_analyzer.py
@@ -3,9 +3,9 @@ from unittest.mock import MagicMock
  import pytest
  from massive.websocket.models import EventType

- from kuhl_haus.mdp.models.market_data_cache_ttl import MarketDataCacheTTL
+ from kuhl_haus.mdp.enum.market_data_cache_ttl import MarketDataCacheTTL
  from src.kuhl_haus.mdp.analyzers.massive_data_analyzer import MassiveDataAnalyzer
- from src.kuhl_haus.mdp.models.market_data_cache_keys import MarketDataCacheKeys
+ from src.kuhl_haus.mdp.enum.market_data_cache_keys import MarketDataCacheKeys


  @pytest.fixture
@@ -49,8 +49,8 @@ def test_analyze_data_with_valid_luld_event_expect_valid_result(valid_symbol, va

      # Assert
      assert len(result) == 1
-     # assert result[0].cache_key == f"{MarketDataCacheKeys.HALTS.value}:{symbol}"
-     # assert result[0].cache_ttl == MarketDataCacheTTL.HALTS.value
+     assert result[0].cache_key == f"{MarketDataCacheKeys.HALTS.value}:{symbol}"
+     assert result[0].cache_ttl == MarketDataCacheTTL.HALTS.value
      assert result[0].publish_key == f"{MarketDataCacheKeys.HALTS.value}:{symbol}"
      assert result[0].data == data

@@ -172,7 +172,7 @@ def test_handle_luld_event_happy_path(valid_symbol, valid_luld_data):
      assert result[0].data == valid_luld_data


- def test_handle_equity_agg_event_happy_path(valid_symbol, valid_equity_agg_data):
+ def test_handle_equity_agg_event_with_no_cache_happy_path(valid_symbol, valid_equity_agg_data):
      # Arrange
      sut = MassiveDataAnalyzer()

@@ -181,13 +181,28 @@ def test_handle_equity_agg_event_happy_path(valid_symbol, valid_equity_agg_data)

      # Assert
      assert len(result) == 1
-     # assert result[0].cache_key == f"{MarketDataCacheKeys.AGGREGATE.value}:{valid_symbol}"
-     # assert result[0].cache_ttl == MarketDataCacheTTL.AGGREGATE.value
+     assert result[0].cache_key is None
+     assert result[0].cache_ttl == 0
+     assert result[0].publish_key == f"{MarketDataCacheKeys.AGGREGATE.value}:{valid_symbol}"
+     assert result[0].data == valid_equity_agg_data
+
+
+ def test_handle_equity_agg_event_with_cache_happy_path(valid_symbol, valid_equity_agg_data):
+     # Arrange
+     sut = MassiveDataAnalyzer(cache_agg_event=True)
+
+     # Act
+     result = sut.handle_equity_agg_event(data=valid_equity_agg_data, symbol=valid_symbol)
+
+     # Assert
+     assert len(result) == 1
+     assert result[0].cache_key == f"{MarketDataCacheKeys.AGGREGATE.value}:{valid_symbol}"
+     assert result[0].cache_ttl == MarketDataCacheTTL.AGGREGATE.value
      assert result[0].publish_key == f"{MarketDataCacheKeys.AGGREGATE.value}:{valid_symbol}"
      assert result[0].data == valid_equity_agg_data


- def test_handle_equity_trade_event_happy_path(valid_symbol, valid_equity_trade_data):
+ def test_handle_equity_trade_event_with_no_cache_happy_path(valid_symbol, valid_equity_trade_data):
      # Arrange
      sut = MassiveDataAnalyzer()

@@ -196,13 +211,28 @@ def test_handle_equity_trade_event_happy_path(valid_symbol, valid_equity_trade_d

      # Assert
      assert len(result) == 1
-     # assert result[0].cache_key == f"{MarketDataCacheKeys.TRADES.value}:{valid_symbol}"
-     # assert result[0].cache_ttl == MarketDataCacheTTL.TRADES.value
+     assert result[0].cache_key is None
+     assert result[0].cache_ttl == 0
+     assert result[0].publish_key == f"{MarketDataCacheKeys.TRADES.value}:{valid_symbol}"
+     assert result[0].data == valid_equity_trade_data
+
+
+ def test_handle_equity_trade_event_with_cache_happy_path(valid_symbol, valid_equity_trade_data):
+     # Arrange
+     sut = MassiveDataAnalyzer(cache_trade_event=True)
+
+     # Act
+     result = sut.handle_equity_trade_event(data=valid_equity_trade_data, symbol=valid_symbol)
+
+     # Assert
+     assert len(result) == 1
+     assert result[0].cache_key == f"{MarketDataCacheKeys.TRADES.value}:{valid_symbol}"
+     assert result[0].cache_ttl == MarketDataCacheTTL.TRADES.value
      assert result[0].publish_key == f"{MarketDataCacheKeys.TRADES.value}:{valid_symbol}"
      assert result[0].data == valid_equity_trade_data


- def test_handle_equity_quote_event_happy_path(valid_symbol, valid_equity_quote_data):
+ def test_handle_equity_quote_event_with_no_cache_happy_path(valid_symbol, valid_equity_quote_data):
      # Arrange
      sut = MassiveDataAnalyzer()

@@ -211,8 +241,23 @@ def test_handle_equity_quote_event_happy_path(valid_symbol, valid_equity_quote_d

      # Assert
      assert len(result) == 1
-     # assert result[0].cache_key == f"{MarketDataCacheKeys.QUOTES.value}:{valid_symbol}"
-     # assert result[0].cache_ttl == MarketDataCacheTTL.QUOTES.value
+     assert result[0].cache_key is None
+     assert result[0].cache_ttl == 0
+     assert result[0].publish_key == f"{MarketDataCacheKeys.QUOTES.value}:{valid_symbol}"
+     assert result[0].data == valid_equity_quote_data
+
+
+ def test_handle_equity_quote_event_with_cache_happy_path(valid_symbol, valid_equity_quote_data):
+     # Arrange
+     sut = MassiveDataAnalyzer(cache_quote_event=True)
+
+     # Act
+     result = sut.handle_equity_quote_event(data=valid_equity_quote_data, symbol=valid_symbol)
+
+     # Assert
+     assert len(result) == 1
+     assert result[0].cache_key == f"{MarketDataCacheKeys.QUOTES.value}:{valid_symbol}"
+     assert result[0].cache_ttl == MarketDataCacheTTL.QUOTES.value
      assert result[0].publish_key == f"{MarketDataCacheKeys.QUOTES.value}:{valid_symbol}"
      assert result[0].data == valid_equity_quote_data

{kuhl_haus_mdp-0.1.11 → kuhl_haus_mdp-0.1.12}/tests/analyzers/test_top_stocks_rehydrate.py
@@ -5,7 +5,7 @@ from unittest.mock import patch, MagicMock, AsyncMock
  import pytest

  from kuhl_haus.mdp.analyzers.top_stocks import TopStocksAnalyzer
- from kuhl_haus.mdp.models.top_stocks_cache_item import TopStocksCacheItem
+ from kuhl_haus.mdp.data.top_stocks_cache_item import TopStocksCacheItem
  from kuhl_haus.mdp.components.market_data_cache import MarketDataCache

@@ -65,7 +65,7 @@ async def test_rehydrate_no_data(mock_zoneinfo, top_stocks_analyzer, mock_logger
      # Configure ZoneInfo mock to return timezone.utc so astimezone works properly
      mock_zoneinfo.return_value = timezone.utc
      top_stocks_analyzer.logger = mock_logger
-     top_stocks_analyzer.cache.get_cache.return_value = None
+     top_stocks_analyzer.cache.read.return_value = None

      # Act
      _ = await top_stocks_analyzer.rehydrate()
@@ -84,7 +84,7 @@ async def test_rehydrate_outside_trading_hours(mock_zoneinfo, top_stocks_analyze
      mock_zoneinfo.return_value = timezone.utc
      top_stocks_analyzer.logger = mock_logger
      data = {"day_start_time": 1672531200}
-     top_stocks_analyzer.cache.get_cache.return_value = data
+     top_stocks_analyzer.cache.read.return_value = data

      # Act
      await top_stocks_analyzer.rehydrate()
@@ -105,7 +105,7 @@ async def test_rehydrate_within_trading_hours(mock_zoneinfo, top_stocks_analyzer
      # Configure ZoneInfo mock to return timezone.utc so astimezone works properly
      mock_zoneinfo.return_value = timezone.utc
      data = {"day_start_time": 1672531200}
-     top_stocks_analyzer.cache.get_cache.return_value = data
+     top_stocks_analyzer.cache.read.return_value = data
      top_stocks_analyzer.logger = mock_logger

      # Act
{kuhl_haus_mdp-0.1.11 → kuhl_haus_mdp-0.1.12}/tests/components/test_market_data_cache.py
@@ -5,8 +5,8 @@
  from kuhl_haus.mdp.components.market_data_cache import MarketDataCache
  from massive.rest.models import TickerSnapshot

- from kuhl_haus.mdp.models.market_data_cache_keys import MarketDataCacheKeys
- from kuhl_haus.mdp.models.market_data_cache_ttl import MarketDataCacheTTL
+ from kuhl_haus.mdp.enum.market_data_cache_keys import MarketDataCacheKeys
+ from kuhl_haus.mdp.enum.market_data_cache_ttl import MarketDataCacheTTL


  @pytest.fixture
@@ -663,7 +663,7 @@ async def test_cache_data_without_ttl_expect_set_called():
      test_cache_key = "test:cache:key"

      # Act
-     await sut.cache_data(data=test_data, cache_key=test_cache_key, cache_ttl=0)
+     await sut.write(data=test_data, cache_key=test_cache_key, cache_ttl=0)

      # Assert
      mock_redis_client.set.assert_awaited_once_with(test_cache_key, json.dumps(test_data))
@@ -681,7 +681,7 @@ async def test_publish_data_expect_publish_called():
      test_publish_key = "market:updates:TEST"

      # Act
-     await sut.publish_data(data=test_data, publish_key=test_publish_key)
+     await sut.broadcast(data=test_data, publish_key=test_publish_key)

      # Assert
      mock_redis_client.publish.assert_awaited_once_with(test_publish_key, json.dumps(test_data))
@@ -844,7 +844,7 @@ async def test_delete_cache_with_existing_key_expect_cache_deleted():
      test_cache_key = "test:cache:key"

      # Act
-     await sut.delete_cache(test_cache_key)
+     await sut.delete(test_cache_key)

      # Assert
      mock_redis_client.delete.assert_awaited_once_with(test_cache_key)
@@ -861,7 +861,7 @@ async def test_delete_cache_with_redis_error_expect_error_logged(mock_logger):
      test_cache_key = "test:cache:key"

      # Act
-     await sut.delete_cache(test_cache_key)
+     await sut.delete(test_cache_key)

      # Assert
      mock_redis_client.delete.assert_awaited_once_with(test_cache_key)
@@ -878,7 +878,7 @@ async def test_delete_cache_with_successful_deletion_expect_info_logged(mock_log
      test_cache_key = "test:cache:key"

      # Act
-     await sut.delete_cache(test_cache_key)
+     await sut.delete(test_cache_key)

      # Assert
      mock_redis_client.delete.assert_awaited_once_with(test_cache_key)
{kuhl_haus_mdp-0.1.11/tests/models → kuhl_haus_mdp-0.1.12/tests/data}/test_top_stocks_cache_item.py
@@ -2,7 +2,7 @@
  import unittest
  from collections import defaultdict

- from src.kuhl_haus.mdp.models.top_stocks_cache_item import TopStocksCacheItem
+ from kuhl_haus.mdp.data.top_stocks_cache_item import TopStocksCacheItem


  class TestTopStocksCacheItem(unittest.TestCase):
{kuhl_haus_mdp-0.1.11 → kuhl_haus_mdp-0.1.12}/tests/helpers/test_queue_name_resolver.py
@@ -4,7 +4,7 @@ import unittest
  from unittest.mock import MagicMock

  from kuhl_haus.mdp.helpers.queue_name_resolver import QueueNameResolver
- from kuhl_haus.mdp.models.massive_data_queue import MassiveDataQueue
+ from kuhl_haus.mdp.enum.massive_data_queue import MassiveDataQueue
  from massive.websocket.models import EquityAgg, EquityTrade, EquityQuote, LimitUpLimitDown, WebSocketMessage

{kuhl_haus_mdp-0.1.11/tests/integ → kuhl_haus_mdp-0.1.12/tests/helpers}/test_web_socket_message_serde.py
@@ -11,7 +11,7 @@ from massive.websocket.models import (
      LimitUpLimitDown,
      EventType,
  )
- from src.kuhl_haus.mdp.integ.web_socket_message_serde import WebSocketMessageSerde
+ from kuhl_haus.mdp.helpers.web_socket_message_serde import WebSocketMessageSerde


  class TestWebSocketMessageSerde(unittest.TestCase):
kuhl_haus_mdp-0.1.11/src/kuhl_haus/mdp/analyzers/massive_data_analyzer.py (deleted)
@@ -1,93 +0,0 @@
- import logging
- from time import time
- from typing import List, Optional
- from massive.websocket.models import EventType
-
- from kuhl_haus.mdp.models.market_data_analyzer_result import MarketDataAnalyzerResult
- from kuhl_haus.mdp.models.market_data_cache_keys import MarketDataCacheKeys
- from kuhl_haus.mdp.models.market_data_cache_ttl import MarketDataCacheTTL
-
-
- class MassiveDataAnalyzer:
-     def __init__(self):
-         self.logger = logging.getLogger(__name__)
-         self.event_handlers = {
-             EventType.LimitUpLimitDown.value: self.handle_luld_event,
-             EventType.EquityAgg.value: self.handle_equity_agg_event,
-             EventType.EquityAggMin.value: self.handle_equity_agg_event,
-             EventType.EquityTrade.value: self.handle_equity_trade_event,
-             EventType.EquityQuote.value: self.handle_equity_quote_event,
-         }
-
-     def analyze_data(self, data: dict) -> Optional[List[MarketDataAnalyzerResult]]:
-         """
-         Process raw market data message
-
-         Args:
-             data: serialized message from Massive/Polygon.io
-
-         Returns:
-             Processed result dict or None if message should be discarded
-         """
-         if "event_type" not in data:
-             self.logger.info("Message missing 'event_type'")
-             return self.handle_unknown_event(data)
-         event_type = data.get("event_type")
-
-         if "symbol" not in data:
-             self.logger.info("Message missing 'symbol'")
-             return self.handle_unknown_event(data)
-         symbol = data.get("symbol")
-
-         if event_type in self.event_handlers:
-             return self.event_handlers[event_type](**{"data": data, "symbol": symbol})
-         else:
-             self.logger.warning(f"Unsupported message type: {event_type}")
-             return self.handle_unknown_event(data)
-
-     @staticmethod
-     def handle_luld_event(data: dict, symbol: str) -> Optional[List[MarketDataAnalyzerResult]]:
-         return [MarketDataAnalyzerResult(
-             data=data,
-             cache_key=f"{MarketDataCacheKeys.HALTS.value}:{symbol}",
-             cache_ttl=MarketDataCacheTTL.HALTS.value,
-             publish_key=f"{MarketDataCacheKeys.HALTS.value}:{symbol}",
-         )]
-
-     @staticmethod
-     def handle_equity_agg_event(data: dict, symbol: str) -> Optional[List[MarketDataAnalyzerResult]]:
-         return [MarketDataAnalyzerResult(
-             data=data,
-             # cache_key=f"{MarketDataCacheKeys.AGGREGATE.value}:{symbol}",
-             # cache_ttl=MarketDataCacheTTL.AGGREGATE.value,
-             publish_key=f"{MarketDataCacheKeys.AGGREGATE.value}:{symbol}",
-         )]
-
-     @staticmethod
-     def handle_equity_trade_event(data: dict, symbol: str) -> Optional[List[MarketDataAnalyzerResult]]:
-         return [MarketDataAnalyzerResult(
-             data=data,
-             # cache_key=f"{MarketDataCacheKeys.TRADES.value}:{symbol}",
-             # cache_ttl=MarketDataCacheTTL.TRADES.value,
-             publish_key=f"{MarketDataCacheKeys.TRADES.value}:{symbol}",
-         )]
-
-     @staticmethod
-     def handle_equity_quote_event(data: dict, symbol: str) -> Optional[List[MarketDataAnalyzerResult]]:
-         return [MarketDataAnalyzerResult(
-             data=data,
-             # cache_key=f"{MarketDataCacheKeys.QUOTES.value}:{symbol}",
-             # cache_ttl=MarketDataCacheTTL.QUOTES.value,
-             publish_key=f"{MarketDataCacheKeys.QUOTES.value}:{symbol}",
-         )]
-
-     @staticmethod
-     def handle_unknown_event(data: dict) -> Optional[List[MarketDataAnalyzerResult]]:
-         timestamp = f"{time()}".replace('.','')
-         cache_key = f"{MarketDataCacheKeys.UNKNOWN.value}:{timestamp}"
-         return [MarketDataAnalyzerResult(
-             data=data,
-             cache_key=cache_key,
-             cache_ttl=MarketDataCacheTTL.UNKNOWN.value,
-             publish_key=f"{MarketDataCacheKeys.UNKNOWN.value}",
-         )]