kuhl-haus-mdp 0.1.3__tar.gz → 0.1.6__tar.gz

This diff compares the contents of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in their respective public registries.
Files changed (47)
  1. {kuhl_haus_mdp-0.1.3 → kuhl_haus_mdp-0.1.6}/PKG-INFO +9 -7
  2. {kuhl_haus_mdp-0.1.3 → kuhl_haus_mdp-0.1.6}/README.md +1 -1
  3. {kuhl_haus_mdp-0.1.3 → kuhl_haus_mdp-0.1.6}/pyproject.toml +8 -6
  4. {kuhl_haus_mdp-0.1.3 → kuhl_haus_mdp-0.1.6}/src/kuhl_haus/mdp/analyzers/analyzer.py +5 -4
  5. {kuhl_haus_mdp-0.1.3 → kuhl_haus_mdp-0.1.6}/src/kuhl_haus/mdp/analyzers/top_stocks.py +13 -54
  6. kuhl_haus_mdp-0.1.6/src/kuhl_haus/mdp/components/market_data_cache.py +143 -0
  7. {kuhl_haus_mdp-0.1.3 → kuhl_haus_mdp-0.1.6}/src/kuhl_haus/mdp/components/market_data_scanner.py +21 -5
  8. {kuhl_haus_mdp-0.1.3 → kuhl_haus_mdp-0.1.6}/src/kuhl_haus/mdp/integ/massive_data_processor.py +1 -4
  9. {kuhl_haus_mdp-0.1.3 → kuhl_haus_mdp-0.1.6}/src/kuhl_haus/mdp/models/market_data_cache_keys.py +3 -0
  10. {kuhl_haus_mdp-0.1.3 → kuhl_haus_mdp-0.1.6}/src/kuhl_haus/mdp/models/market_data_cache_ttl.py +1 -0
  11. {kuhl_haus_mdp-0.1.3 → kuhl_haus_mdp-0.1.6}/tests/analyzers/test_massive_data_analyzer.py +2 -2
  12. kuhl_haus_mdp-0.1.6/tests/analyzers/test_top_stocks_rehydrate.py +117 -0
  13. kuhl_haus_mdp-0.1.6/tests/components/test_market_data_cache.py +708 -0
  14. {kuhl_haus_mdp-0.1.3 → kuhl_haus_mdp-0.1.6}/tests/components/test_market_data_scanner.py +8 -18
  15. kuhl_haus_mdp-0.1.6/tests/helpers/test_utils.py +93 -0
  16. kuhl_haus_mdp-0.1.3/src/kuhl_haus/mdp/components/market_data_cache.py +0 -29
  17. {kuhl_haus_mdp-0.1.3 → kuhl_haus_mdp-0.1.6}/LICENSE.txt +0 -0
  18. {kuhl_haus_mdp-0.1.3 → kuhl_haus_mdp-0.1.6}/src/kuhl_haus/mdp/__init__.py +0 -0
  19. {kuhl_haus_mdp-0.1.3 → kuhl_haus_mdp-0.1.6}/src/kuhl_haus/mdp/analyzers/__init__.py +0 -0
  20. {kuhl_haus_mdp-0.1.3 → kuhl_haus_mdp-0.1.6}/src/kuhl_haus/mdp/analyzers/massive_data_analyzer.py +0 -0
  21. {kuhl_haus_mdp-0.1.3 → kuhl_haus_mdp-0.1.6}/src/kuhl_haus/mdp/components/__init__.py +0 -0
  22. {kuhl_haus_mdp-0.1.3 → kuhl_haus_mdp-0.1.6}/src/kuhl_haus/mdp/components/widget_data_service.py +0 -0
  23. {kuhl_haus_mdp-0.1.3 → kuhl_haus_mdp-0.1.6}/src/kuhl_haus/mdp/helpers/__init__.py +0 -0
  24. {kuhl_haus_mdp-0.1.3 → kuhl_haus_mdp-0.1.6}/src/kuhl_haus/mdp/helpers/process_manager.py +0 -0
  25. {kuhl_haus_mdp-0.1.3 → kuhl_haus_mdp-0.1.6}/src/kuhl_haus/mdp/helpers/queue_name_resolver.py +0 -0
  26. {kuhl_haus_mdp-0.1.3/src/kuhl_haus/mdp/integ → kuhl_haus_mdp-0.1.6/src/kuhl_haus/mdp/helpers}/utils.py +0 -0
  27. {kuhl_haus_mdp-0.1.3 → kuhl_haus_mdp-0.1.6}/src/kuhl_haus/mdp/integ/__init__.py +0 -0
  28. {kuhl_haus_mdp-0.1.3 → kuhl_haus_mdp-0.1.6}/src/kuhl_haus/mdp/integ/massive_data_listener.py +0 -0
  29. {kuhl_haus_mdp-0.1.3 → kuhl_haus_mdp-0.1.6}/src/kuhl_haus/mdp/integ/massive_data_queues.py +0 -0
  30. {kuhl_haus_mdp-0.1.3 → kuhl_haus_mdp-0.1.6}/src/kuhl_haus/mdp/integ/web_socket_message_serde.py +0 -0
  31. {kuhl_haus_mdp-0.1.3 → kuhl_haus_mdp-0.1.6}/src/kuhl_haus/mdp/models/__init__.py +0 -0
  32. {kuhl_haus_mdp-0.1.3 → kuhl_haus_mdp-0.1.6}/src/kuhl_haus/mdp/models/market_data_analyzer_result.py +0 -0
  33. {kuhl_haus_mdp-0.1.3 → kuhl_haus_mdp-0.1.6}/src/kuhl_haus/mdp/models/market_data_pubsub_keys.py +0 -0
  34. {kuhl_haus_mdp-0.1.3 → kuhl_haus_mdp-0.1.6}/src/kuhl_haus/mdp/models/market_data_scanner_names.py +0 -0
  35. {kuhl_haus_mdp-0.1.3 → kuhl_haus_mdp-0.1.6}/src/kuhl_haus/mdp/models/massive_data_queue.py +0 -0
  36. {kuhl_haus_mdp-0.1.3 → kuhl_haus_mdp-0.1.6}/src/kuhl_haus/mdp/models/top_stocks_cache_item.py +0 -0
  37. {kuhl_haus_mdp-0.1.3 → kuhl_haus_mdp-0.1.6}/tests/__init__.py +0 -0
  38. {kuhl_haus_mdp-0.1.3 → kuhl_haus_mdp-0.1.6}/tests/analyzers/__init__.py +0 -0
  39. {kuhl_haus_mdp-0.1.3 → kuhl_haus_mdp-0.1.6}/tests/components/__init__.py +0 -0
  40. {kuhl_haus_mdp-0.1.3 → kuhl_haus_mdp-0.1.6}/tests/components/test_widget_data_service.py +0 -0
  41. {kuhl_haus_mdp-0.1.3 → kuhl_haus_mdp-0.1.6}/tests/helpers/__init__.py +0 -0
  42. {kuhl_haus_mdp-0.1.3 → kuhl_haus_mdp-0.1.6}/tests/helpers/test_process_manager.py +0 -0
  43. {kuhl_haus_mdp-0.1.3 → kuhl_haus_mdp-0.1.6}/tests/helpers/test_queue_name_resolver.py +0 -0
  44. {kuhl_haus_mdp-0.1.3 → kuhl_haus_mdp-0.1.6}/tests/integ/__init__.py +0 -0
  45. {kuhl_haus_mdp-0.1.3 → kuhl_haus_mdp-0.1.6}/tests/integ/test_web_socket_message_serde.py +0 -0
  46. {kuhl_haus_mdp-0.1.3 → kuhl_haus_mdp-0.1.6}/tests/models/__init__.py +0 -0
  47. {kuhl_haus_mdp-0.1.3 → kuhl_haus_mdp-0.1.6}/tests/models/test_top_stocks_cache_item.py +0 -0
--- a/PKG-INFO
+++ b/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: kuhl-haus-mdp
-Version: 0.1.3
+Version: 0.1.6
 Summary: Market data processing pipeline for stock market scanner
 Author-Email: Tom Pounders <git@oldschool.engineer>
 License: The MIT License (MIT)
@@ -33,20 +33,22 @@ Project-URL: Source, https://github.com/kuhl-haus/kuhl-haus-mdp.git
 Project-URL: Changelog, https://github.com/kuhl-haus/kuhl-haus-mdp/commits
 Project-URL: Tracker, https://github.com/kuhl-haus/kuhl-haus-mdp/issues
 Requires-Python: <3.13,>=3.9.21
-Requires-Dist: websockets
+Requires-Dist: aiohttp
 Requires-Dist: aio-pika
-Requires-Dist: redis[asyncio]
-Requires-Dist: tenacity
 Requires-Dist: fastapi
-Requires-Dist: uvicorn[standard]
+Requires-Dist: massive
 Requires-Dist: pydantic-settings
 Requires-Dist: python-dotenv
-Requires-Dist: massive
+Requires-Dist: redis[asyncio]
+Requires-Dist: tenacity
+Requires-Dist: uvicorn[standard]
+Requires-Dist: websockets
 Provides-Extra: testing
 Requires-Dist: setuptools; extra == "testing"
 Requires-Dist: pdm-backend; extra == "testing"
 Requires-Dist: pytest; extra == "testing"
 Requires-Dist: pytest-cov; extra == "testing"
+Requires-Dist: pytest-asyncio; extra == "testing"
 Description-Content-Type: text/markdown
 
 <!-- These are examples of badges you might want to add to your README:
@@ -66,7 +68,7 @@ Description-Content-Type: text/markdown
 [![codecov](https://codecov.io/gh/kuhl-haus/kuhl-haus-mdp/branch/mainline/graph/badge.svg)](https://codecov.io/gh/kuhl-haus/kuhl-haus-mdp)
 [![GitHub issues](https://img.shields.io/github/issues/kuhl-haus/kuhl-haus-mdp)](https://github.com/kuhl-haus/kuhl-haus-mdp/issues)
 [![GitHub pull requests](https://img.shields.io/github/issues-pr/kuhl-haus/kuhl-haus-mdp)](https://github.com/kuhl-haus/kuhl-haus-mdp/pulls)
-
+[![Documentation](https://readthedocs.org/projects/kuhl-haus-mdp/badge/?version=latest)](https://kuhl-haus-mdp.readthedocs.io/en/latest/)
 
 # kuhl-haus-mdp
 
--- a/README.md
+++ b/README.md
@@ -15,7 +15,7 @@
 [![codecov](https://codecov.io/gh/kuhl-haus/kuhl-haus-mdp/branch/mainline/graph/badge.svg)](https://codecov.io/gh/kuhl-haus/kuhl-haus-mdp)
 [![GitHub issues](https://img.shields.io/github/issues/kuhl-haus/kuhl-haus-mdp)](https://github.com/kuhl-haus/kuhl-haus-mdp/issues)
 [![GitHub pull requests](https://img.shields.io/github/issues-pr/kuhl-haus/kuhl-haus-mdp)](https://github.com/kuhl-haus/kuhl-haus-mdp/pulls)
-
+[![Documentation](https://readthedocs.org/projects/kuhl-haus-mdp/badge/?version=latest)](https://kuhl-haus-mdp.readthedocs.io/en/latest/)
 
 # kuhl-haus-mdp
 
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -19,17 +19,18 @@ classifiers = [
     "Programming Language :: Python",
 ]
 dependencies = [
-    "websockets",
+    "aiohttp",
     "aio-pika",
-    "redis[asyncio]",
-    "tenacity",
     "fastapi",
-    "uvicorn[standard]",
+    "massive",
     "pydantic-settings",
     "python-dotenv",
-    "massive",
+    "redis[asyncio]",
+    "tenacity",
+    "uvicorn[standard]",
+    "websockets",
 ]
-version = "0.1.3"
+version = "0.1.6"
 
 [project.license]
 file = "LICENSE.txt"
@@ -47,6 +48,7 @@ testing = [
     "pdm-backend",
     "pytest",
     "pytest-cov",
+    "pytest-asyncio",
 ]
 
 [tool.setuptools_scm]
--- a/src/kuhl_haus/mdp/analyzers/analyzer.py
+++ b/src/kuhl_haus/mdp/analyzers/analyzer.py
@@ -1,14 +1,15 @@
 from typing import Optional, List
 from kuhl_haus.mdp.models.market_data_analyzer_result import MarketDataAnalyzerResult
+from kuhl_haus.mdp.components.market_data_cache import MarketDataCache
 
 
 class Analyzer:
-    cache_key: str
+    cache: MarketDataCache
 
-    def __init__(self, cache_key: str, **kwargs):
-        self.cache_key = cache_key
+    def __init__(self, cache: MarketDataCache, **kwargs):
+        self.cache = cache
 
-    async def rehydrate(self, data: dict):
+    async def rehydrate(self):
         pass
 
     async def analyze_data(self, data: dict) -> Optional[List[MarketDataAnalyzerResult]]:
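
A note on the contract change above: analyzers now receive a MarketDataCache instance instead of a bare cache_key, and rehydrate() no longer takes the cached dict as an argument. A minimal sketch of a subclass under the new interface (ExampleAnalyzer and its "cache:example" key are hypothetical, for illustration only):

    from typing import List, Optional

    from kuhl_haus.mdp.analyzers.analyzer import Analyzer
    from kuhl_haus.mdp.components.market_data_cache import MarketDataCache
    from kuhl_haus.mdp.models.market_data_analyzer_result import MarketDataAnalyzerResult


    class ExampleAnalyzer(Analyzer):
        def __init__(self, cache: MarketDataCache, **kwargs):
            super().__init__(cache=cache, **kwargs)
            self.cache_key = "cache:example"  # hypothetical key

        async def rehydrate(self):
            # State is now pulled from the injected cache, not passed in by the caller.
            data = await self.cache.get_cache(self.cache_key)
            if data:
                pass  # restore analyzer state from the cached dict here

        async def analyze_data(self, data: dict) -> Optional[List[MarketDataAnalyzerResult]]:
            return None  # a real analyzer returns scanner results here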
--- a/src/kuhl_haus/mdp/analyzers/top_stocks.py
+++ b/src/kuhl_haus/mdp/analyzers/top_stocks.py
@@ -16,6 +16,7 @@ from massive.websocket.models import (
 )
 
 from kuhl_haus.mdp.analyzers.analyzer import Analyzer
+from kuhl_haus.mdp.components.market_data_cache import MarketDataCache
 from kuhl_haus.mdp.models.market_data_analyzer_result import MarketDataAnalyzerResult
 from kuhl_haus.mdp.models.market_data_cache_keys import MarketDataCacheKeys
 from kuhl_haus.mdp.models.market_data_pubsub_keys import MarketDataPubSubKeys
@@ -24,22 +25,16 @@ from kuhl_haus.mdp.models.top_stocks_cache_item import TopStocksCacheItem
 
 class TopStocksAnalyzer(Analyzer):
 
-    def __init__(self, rest_client: RESTClient, **kwargs):
-        if "cache_key" not in kwargs:
-            kwargs["cache_key"] = MarketDataCacheKeys.TOP_STOCKS_SCANNER.value
-        super().__init__(**kwargs)
-        self.rest_client = rest_client
+    def __init__(self, cache: MarketDataCache, **kwargs):
+        super().__init__(cache=cache, **kwargs)
+        self.cache = cache
+        self.cache_key = MarketDataCacheKeys.TOP_STOCKS_SCANNER.value
         self.logger = logging.getLogger(__name__)
         self.cache_item = TopStocksCacheItem()
         self.last_update_time = 0
         self.pre_market_reset = False
 
-    async def rehydrate(self, data: dict):
-        if not data:
-            self.cache_item = TopStocksCacheItem()
-            self.logger.info("No data to rehydrate TopStocksCacheItem.")
-            return
-
+    async def rehydrate(self):
         # Get current time in UTC, then convert to Eastern Time
         utc_now = datetime.now(timezone.utc)
         et_now = utc_now.astimezone(ZoneInfo("America/New_York"))
@@ -51,6 +46,11 @@ class TopStocksAnalyzer(Analyzer):
             self.cache_item = TopStocksCacheItem()
             self.logger.info(f"Outside market hours ({et_now.strftime('%H:%M:%S %Z')}), clearing cache.")
             return
+        data = await self.cache.get_cache(self.cache_key)
+        if not data:
+            self.cache_item = TopStocksCacheItem()
+            self.logger.info("No data to rehydrate TopStocksCacheItem.")
+            return
         self.cache_item = TopStocksCacheItem(**data)
         self.logger.info("Rehydrated TopStocksCacheItem")
 
@@ -135,7 +135,7 @@
         prev_day_vwap = 0
         while retry_count < max_tries:
             try:
-                snapshot = await self.get_ticker_snapshot(event.symbol)
+                snapshot = await self.cache.get_ticker_snapshot(event.symbol)
                 prev_day_close = snapshot.prev_day.close
                 prev_day_volume = snapshot.prev_day.volume
                 prev_day_vwap = snapshot.prev_day.vwap
@@ -153,7 +153,7 @@
         avg_volume = 0
         while retry_count < max_tries:
             try:
-                avg_volume = await self.get_avg_volume(event.symbol)
+                avg_volume = await self.cache.get_avg_volume(event.symbol)
                 break
             except (BadResponse, ZeroDivisionError) as e:
                 self.logger.error(f"Error getting average volume for {event.symbol}: {repr(e)}", exc_info=e, stack_info=True)
@@ -217,44 +217,3 @@
             "start_timestamp": event.start_timestamp,
             "end_timestamp": event.end_timestamp,
         }
-
-    async def get_ticker_snapshot(self, ticker: str) -> TickerSnapshot:
-        self.logger.debug(f"Getting snapshot for {ticker}")
-        result: TickerSnapshot = self.rest_client.get_snapshot_ticker(
-            market_type="stocks",
-            ticker=ticker
-        )
-        self.logger.debug(f"Snapshot result: {result}")
-        return result
-
-    async def get_avg_volume(self, ticker: str):
-        self.logger.debug(f"Getting average volume for {ticker}")
-        # Get date string in YYYY-MM-DD format
-        end_date = datetime.now(timezone.utc).strftime("%Y-%m-%d")
-        # Get date from 30 trading sessions ago in YYYY-MM-DD format
-        start_date = (datetime.now(timezone.utc) - timedelta(days=42)).strftime("%Y-%m-%d")
-
-        result: Iterator[Agg] = self.rest_client.list_aggs(
-            ticker=ticker,
-            multiplier=1,
-            timespan="day",
-            from_=start_date,
-            to=end_date,
-            adjusted=True,
-            sort="desc"
-        )
-        self.logger.debug(f"average volume result: {result}")
-
-        total_volume = 0
-        max_periods = 30
-        periods_calculated = 0
-        for agg in result:
-            if periods_calculated < max_periods:
-                total_volume += agg.volume
-                periods_calculated += 1
-            else:
-                break
-        avg_volume = total_volume / periods_calculated
-
-        self.logger.debug(f"average volume {ticker}: {avg_volume}")
-        return avg_volume
--- /dev/null
+++ b/src/kuhl_haus/mdp/components/market_data_cache.py
@@ -0,0 +1,143 @@
+import json
+import logging
+from typing import Any, Optional, Iterator, List
+
+import aiohttp
+import redis.asyncio as aioredis
+from massive.rest import RESTClient
+from massive.rest.models import (
+    TickerSnapshot,
+    FinancialRatio,
+)
+
+from kuhl_haus.mdp.models.market_data_cache_keys import MarketDataCacheKeys
+from kuhl_haus.mdp.models.market_data_cache_ttl import MarketDataCacheTTL
+
+
+class MarketDataCache:
+    def __init__(self, rest_client: RESTClient, redis_client: aioredis.Redis, massive_api_key: str):
+        self.logger = logging.getLogger(__name__)
+        self.rest_client = rest_client
+        self.massive_api_key = massive_api_key
+        self.redis_client = redis_client
+        self.http_session = None
+
+    async def get_cache(self, cache_key: str) -> Optional[dict]:
+        """Fetch current value from Redis cache (for snapshot requests)."""
+        value = await self.redis_client.get(cache_key)
+        if value:
+            return json.loads(value)
+        return None
+
+    async def cache_data(self, data: Any, cache_key: str, cache_ttl: int = 0):
+        if cache_ttl > 0:
+            await self.redis_client.setex(cache_key, cache_ttl, json.dumps(data))
+        else:
+            await self.redis_client.set(cache_key, json.dumps(data))
+        self.logger.debug(f"Cached data for {cache_key}")
+
+    async def publish_data(self, data: Any, publish_key: str = None):
+        await self.redis_client.publish(publish_key, json.dumps(data))
+        self.logger.debug(f"Published data for {publish_key}")
+
+    async def get_ticker_snapshot(self, ticker: str) -> TickerSnapshot:
+        self.logger.debug(f"Getting snapshot for {ticker}")
+        cache_key = f"{MarketDataCacheKeys.TICKER_SNAPSHOTS.value}:{ticker}"
+        result = await self.get_cache(cache_key=cache_key)
+        if result:
+            snapshot = TickerSnapshot.from_dict(**result)
+        else:
+            snapshot: TickerSnapshot = self.rest_client.get_snapshot_ticker(
+                market_type="stocks",
+                ticker=ticker
+            )
+            self.logger.debug(f"Snapshot result: {snapshot}")
+            await self.cache_data(
+                data=snapshot,
+                cache_key=cache_key,
+                cache_ttl=MarketDataCacheTTL.EIGHT_HOURS.value
+            )
+        return snapshot
+
+    async def get_avg_volume(self, ticker: str):
+        self.logger.debug(f"Getting average volume for {ticker}")
+        cache_key = f"{MarketDataCacheKeys.TICKER_AVG_VOLUME.value}:{ticker}"
+        avg_volume = await self.get_cache(cache_key=cache_key)
+        if avg_volume:
+            self.logger.debug(f"Returning cached value for {ticker}: {avg_volume}")
+            return avg_volume
+
+        results: Iterator[FinancialRatio] = self.rest_client.list_financials_ratios(ticker=ticker)
+        ratios: List[FinancialRatio] = []
+        for financial_ratio in results:
+            ratios.append(financial_ratio)
+        if len(ratios) == 1:
+            avg_volume = ratios[0].average_volume
+        else:
+            raise Exception(f"Unexpected number of financial ratios for {ticker}: {len(ratios)}")
+
+        self.logger.debug(f"average volume {ticker}: {avg_volume}")
+        await self.cache_data(
+            data=avg_volume,
+            cache_key=cache_key,
+            cache_ttl=MarketDataCacheTTL.TWELVE_HOURS.value
+        )
+        return avg_volume
+
+    async def get_free_float(self, ticker: str):
+        self.logger.debug(f"Getting free float for {ticker}")
+        cache_key = f"{MarketDataCacheKeys.TICKER_FREE_FLOAT.value}:{ticker}"
+        free_float = await self.get_cache(cache_key=cache_key)
+        if free_float:
+            self.logger.debug(f"Returning cached value for {ticker}: {free_float}")
+            return free_float
+
+        # NOTE: This endpoint is experimental and the interface may change.
+        # https://massive.com/docs/rest/stocks/fundamentals/float
+        url = f"https://api.massive.com/stocks/vX/float"
+        params = {
+            "ticker": ticker,
+            "apiKey": self.massive_api_key
+        }
+
+        session = await self.get_http_session()
+        try:
+            async with session.get(url, params=params, timeout=aiohttp.ClientTimeout(total=10)) as response:
+                response.raise_for_status()
+                data = await response.json()
+
+                # Extract free_float from response
+                if data.get("status") == "OK" and data.get("results") is not None:
+                    results = data["results"]
+                    if len(results) > 0:
+                        free_float = results[0].get("free_float")
+                    else:
+                        raise Exception(f"No free float data returned for {ticker}")
+                else:
+                    raise Exception(f"Invalid response from Massive API for {ticker}: {data}")
+
+        except aiohttp.ClientError as e:
+            self.logger.error(f"HTTP error fetching free float for {ticker}: {e}")
+            raise
+        except Exception as e:
+            self.logger.error(f"Error fetching free float for {ticker}: {e}")
+            raise
+
+        self.logger.debug(f"free float {ticker}: {free_float}")
+        await self.cache_data(
+            data=free_float,
+            cache_key=cache_key,
+            cache_ttl=MarketDataCacheTTL.TWELVE_HOURS.value
+        )
+        return free_float
+
+    async def get_http_session(self) -> aiohttp.ClientSession:
+        """Get or create aiohttp session for async HTTP requests."""
+        if self.http_session is None or self.http_session.closed:
+            self.http_session = aiohttp.ClientSession()
+        return self.http_session
+
+    async def close(self):
+        """Close aiohttp session."""
+        if self.http_session and not self.http_session.closed:
+            await self.http_session.close()
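
For orientation, a minimal sketch of constructing the new MarketDataCache by hand, assuming a local Redis and a placeholder API key (this mirrors what MarketDataScanner.connect() now does, per the hunks below; per the code above, the first call falls through to the REST client and writes the result to Redis with an eight-hour TTL, and later calls within the TTL are served from the cache):

    import asyncio

    import redis.asyncio as aioredis
    from massive.rest import RESTClient

    from kuhl_haus.mdp.components.market_data_cache import MarketDataCache


    async def main():
        redis_client = aioredis.from_url("redis://localhost:6379")  # placeholder URL
        cache = MarketDataCache(
            rest_client=RESTClient(api_key="YOUR_API_KEY"),  # placeholder key
            redis_client=redis_client,
            massive_api_key="YOUR_API_KEY",
        )
        try:
            snapshot = await cache.get_ticker_snapshot("AAPL")
            print(snapshot)
        finally:
            await cache.close()  # closes the lazily created aiohttp session
            await redis_client.aclose()  # redis-py >= 5; older versions spell this close()


    asyncio.run(main())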
--- a/src/kuhl_haus/mdp/components/market_data_scanner.py
+++ b/src/kuhl_haus/mdp/components/market_data_scanner.py
@@ -1,13 +1,16 @@
 import asyncio
 import json
 import logging
-from typing import Union, Optional, List
+from typing import Any, Union, Optional, List
 
 import redis.asyncio as aioredis
 from redis.exceptions import ConnectionError
 
+from massive.rest import RESTClient
+
 from kuhl_haus.mdp.analyzers.analyzer import Analyzer
 from kuhl_haus.mdp.models.market_data_analyzer_result import MarketDataAnalyzerResult
+from kuhl_haus.mdp.components.market_data_cache import MarketDataCache
 
 
 class MarketDataScanner:
@@ -18,11 +21,14 @@ class MarketDataScanner:
     error: int
     restarts: int
 
-    def __init__(self, redis_url: str, analyzer: Analyzer, subscriptions: List[str]):
+    def __init__(self, redis_url: str, massive_api_key: str, subscriptions: List[str], analyzer_class: Any):
         self.redis_url = redis_url
-        self.analyzer = analyzer
+        self.massive_api_key = massive_api_key
         self.logger = logging.getLogger(__name__)
 
+        self.analyzer: Analyzer = None
+        self.analyzer_class = analyzer_class
+
         # Connection objects
         self.redis_client = None  # : aioredis.Redis = None
         self.pubsub_client: Optional[aioredis.client.PubSub] = None
@@ -30,6 +36,7 @@
         # State
         self.mdc_connected = False
         self.running = False
+        self.mdc: Optional[MarketDataCache] = None
 
         self.subscriptions: List[str] = subscriptions
         self._pubsub_task: Union[asyncio.Task, None] = None
@@ -48,9 +55,9 @@
         await self.connect()
         self.pubsub_client = self.redis_client.pubsub()
 
-        scanner_cache = await self.get_cache(self.analyzer.cache_key)
+        self.analyzer = self.analyzer_class(cache=self.mdc)
         self.logger.info(f"mds rehydrating from cache")
-        await self.analyzer.rehydrate(scanner_cache)
+        await self.analyzer.rehydrate()
         self.logger.info("mds rehydration complete")
 
         for subscription in self.subscriptions:
@@ -73,6 +80,10 @@
                 pass
             self._pubsub_task = None
 
+        if self.mdc:
+            await self.mdc.close()
+            self.mdc = None
+
         if self.pubsub_client:
             for subscription in self.subscriptions:
                 if subscription.endswith("*"):
@@ -104,6 +115,11 @@
 
             # Test Redis connection
             await self.redis_client.ping()
+            self.mdc = MarketDataCache(
+                rest_client=RESTClient(api_key=self.massive_api_key),
+                redis_client=self.redis_client,
+                massive_api_key=self.massive_api_key
+            )
             self.mdc_connected = True
             self.logger.debug(f"Connected to Redis: {self.redis_url}")
         except Exception as e:
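
With these changes the scanner owns its analyzer's lifecycle: callers pass an analyzer class plus the Massive API key, and start() instantiates the analyzer with the connected MarketDataCache. A sketch of the new call site (the URL, key, and channel pattern are placeholders):

    from kuhl_haus.mdp.analyzers.top_stocks import TopStocksAnalyzer
    from kuhl_haus.mdp.components.market_data_scanner import MarketDataScanner

    scanner = MarketDataScanner(
        redis_url="redis://localhost:6379",  # placeholder
        massive_api_key="YOUR_API_KEY",      # placeholder
        subscriptions=["market-data:*"],     # placeholder channel pattern
        analyzer_class=TopStocksAnalyzer,    # a class, not an instance
    )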
--- a/src/kuhl_haus/mdp/integ/massive_data_processor.py
+++ b/src/kuhl_haus/mdp/integ/massive_data_processor.py
@@ -1,17 +1,14 @@
 import asyncio
 import json
 import logging
-from typing import Dict
 
 import aio_pika
-import redis
 import redis.asyncio as aioredis
 from aio_pika.abc import AbstractIncomingMessage
-from aio_pika.exceptions import AMQPConnectionError
 
 from kuhl_haus.mdp.analyzers.massive_data_analyzer import MassiveDataAnalyzer
-from kuhl_haus.mdp.models.market_data_analyzer_result import MarketDataAnalyzerResult
 from kuhl_haus.mdp.integ.web_socket_message_serde import WebSocketMessageSerde
+from kuhl_haus.mdp.models.market_data_analyzer_result import MarketDataAnalyzerResult
 
 
 class MassiveDataProcessor:
--- a/src/kuhl_haus/mdp/models/market_data_cache_keys.py
+++ b/src/kuhl_haus/mdp/models/market_data_cache_keys.py
@@ -16,6 +16,9 @@ class MarketDataCacheKeys(Enum):
 
     # MARKET DATA CACHE
     DAILY_AGGREGATES = 'aggregate:daily'
+    TICKER_SNAPSHOTS = 'snapshots'
+    TICKER_AVG_VOLUME = 'avg_volume'
+    TICKER_FREE_FLOAT = 'free_float'
 
     # MARKET DATA PROCESSOR CACHE
     TOP_TRADES_SCANNER = f'cache:{MarketDataScannerNames.TOP_TRADES.value}'
--- a/src/kuhl_haus/mdp/models/market_data_cache_ttl.py
+++ b/src/kuhl_haus/mdp/models/market_data_cache_ttl.py
@@ -8,6 +8,7 @@ class MarketDataCacheTTL(Enum):
     FOUR_HOURS = 14400
     SIX_HOURS = 21600
    EIGHT_HOURS = 28800
+    TWELVE_HOURS = 43200
 
     # Days
     ONE_DAY = 86400
--- a/tests/analyzers/test_massive_data_analyzer.py
+++ b/tests/analyzers/test_massive_data_analyzer.py
@@ -2,10 +2,10 @@ from unittest.mock import MagicMock
 
 import pytest
 from massive.websocket.models import EventType
+
+from kuhl_haus.mdp.models.market_data_cache_ttl import MarketDataCacheTTL
 from src.kuhl_haus.mdp.analyzers.massive_data_analyzer import MassiveDataAnalyzer
-from src.kuhl_haus.mdp.models.market_data_analyzer_result import MarketDataAnalyzerResult
 from src.kuhl_haus.mdp.models.market_data_cache_keys import MarketDataCacheKeys
-from kuhl_haus.mdp.models.market_data_cache_ttl import MarketDataCacheTTL
 
 
 @pytest.fixture
--- /dev/null
+++ b/tests/analyzers/test_top_stocks_rehydrate.py
@@ -0,0 +1,117 @@
+
+from datetime import datetime, timezone
+from unittest.mock import patch, MagicMock, AsyncMock
+
+import pytest
+
+from kuhl_haus.mdp.analyzers.top_stocks import TopStocksAnalyzer
+from kuhl_haus.mdp.models.top_stocks_cache_item import TopStocksCacheItem
+from kuhl_haus.mdp.components.market_data_cache import MarketDataCache
+
+
+@pytest.fixture
+def mock_market_data_cache():
+    mock = MagicMock(spec=MarketDataCache)
+    mock.get_cache = AsyncMock()
+    return mock
+
+
+@pytest.fixture
+def top_stocks_analyzer(mock_market_data_cache):
+    return TopStocksAnalyzer(cache=mock_market_data_cache)
+
+
+@pytest.fixture
+def mock_logger():
+    return MagicMock()
+
+
+@pytest.fixture
+def analyzer(mock_market_data_cache, mock_logger):
+    """Fixture to set up the TopStocksAnalyzer system under test."""
+    sut = TopStocksAnalyzer(cache=mock_market_data_cache)
+    sut.logger = mock_logger
+    sut.cache_item = TopStocksCacheItem()
+    sut.cache_item.day_start_time = datetime(2026, 1, 1, 4, 0, 0, tzinfo=timezone.utc).timestamp()
+    sut.last_update_time = 0
+    return sut
+
+
+@pytest.fixture
+def trading_hour_patch():
+    # Patch datetime to simulate within trading hours
+    patcher = patch("kuhl_haus.mdp.analyzers.top_stocks.datetime", wraps=datetime)
+    mocked_datetime = patcher.start()
+    mocked_datetime.now.return_value = datetime(2023, 11, 1, 14, 0, 0, tzinfo=timezone.utc)  # Wed 14:00 UTC
+    yield mocked_datetime
+    patcher.stop()
+
+
+@pytest.fixture
+def outside_trading_hour_patch():
+    # Patch datetime to simulate outside trading hours
+    patcher = patch("kuhl_haus.mdp.analyzers.top_stocks.datetime", wraps=datetime)
+    mocked_datetime = patcher.start()
+    mocked_datetime.now.return_value = datetime(2023, 11, 1, 21, 0, 0, tzinfo=timezone.utc)  # Wed 21:00 UTC
+    yield mocked_datetime
+    patcher.stop()
+
+
+@pytest.mark.asyncio
+@patch("kuhl_haus.mdp.analyzers.top_stocks.ZoneInfo")
+async def test_rehydrate_no_data(mock_zoneinfo, top_stocks_analyzer, mock_logger, trading_hour_patch, mock_market_data_cache):
+    """Test rehydrate when no data is passed."""
+    # Arrange
+    # Configure ZoneInfo mock to return timezone.utc so astimezone works properly
+    mock_zoneinfo.return_value = timezone.utc
+    top_stocks_analyzer.logger = mock_logger
+    top_stocks_analyzer.cache.get_cache.return_value = None
+
+    # Act
+    _ = await top_stocks_analyzer.rehydrate()
+
+    # Assert
+    assert isinstance(top_stocks_analyzer.cache_item, TopStocksCacheItem)
+    mock_logger.info.assert_called_once_with("No data to rehydrate TopStocksCacheItem.")
+
+
+@pytest.mark.asyncio
+@patch("kuhl_haus.mdp.analyzers.top_stocks.ZoneInfo")
+async def test_rehydrate_outside_trading_hours(mock_zoneinfo, top_stocks_analyzer, outside_trading_hour_patch, mock_logger, mock_market_data_cache):
+    """Test rehydrate outside trading hours."""
+    # Arrange
+    # Configure ZoneInfo mock to return timezone.utc so astimezone works properly
+    mock_zoneinfo.return_value = timezone.utc
+    top_stocks_analyzer.logger = mock_logger
+    data = {"day_start_time": 1672531200}
+    top_stocks_analyzer.cache.get_cache.return_value = data
+
+    # Act
+    await top_stocks_analyzer.rehydrate()
+
+    # Assert
+    assert isinstance(top_stocks_analyzer.cache_item, TopStocksCacheItem)
+    assert top_stocks_analyzer.cache_item.day_start_time == 0.0
+    mock_logger.info.assert_called_once_with(
+        "Outside market hours (21:00:00 UTC), clearing cache."
+    )
+
+
+@pytest.mark.asyncio
+@patch("kuhl_haus.mdp.analyzers.top_stocks.ZoneInfo")
+async def test_rehydrate_within_trading_hours(mock_zoneinfo, top_stocks_analyzer, trading_hour_patch, mock_logger, mock_market_data_cache):
+    """Test rehydrate within trading hours with valid data."""
+    # Arrange
+    # Configure ZoneInfo mock to return timezone.utc so astimezone works properly
+    mock_zoneinfo.return_value = timezone.utc
+    data = {"day_start_time": 1672531200}
+    top_stocks_analyzer.cache.get_cache.return_value = data
+    top_stocks_analyzer.logger = mock_logger
+
+    # Act
+    await top_stocks_analyzer.rehydrate()
+
+    # Assert
+    assert isinstance(top_stocks_analyzer.cache_item, TopStocksCacheItem)
+    assert top_stocks_analyzer.cache_item.day_start_time == 1672531200
+    mock_logger.info.assert_called_once_with("Rehydrated TopStocksCacheItem")