kuhl-haus-mdp 0.1.9__tar.gz → 0.1.10__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (48)
  1. {kuhl_haus_mdp-0.1.9 → kuhl_haus_mdp-0.1.10}/PKG-INFO +1 -1
  2. {kuhl_haus_mdp-0.1.9 → kuhl_haus_mdp-0.1.10}/pyproject.toml +1 -1
  3. {kuhl_haus_mdp-0.1.9 → kuhl_haus_mdp-0.1.10}/src/kuhl_haus/mdp/analyzers/massive_data_analyzer.py +8 -8
  4. {kuhl_haus_mdp-0.1.9 → kuhl_haus_mdp-0.1.10}/src/kuhl_haus/mdp/analyzers/top_stocks.py +13 -15
  5. {kuhl_haus_mdp-0.1.9 → kuhl_haus_mdp-0.1.10}/src/kuhl_haus/mdp/components/market_data_cache.py +5 -3
  6. kuhl_haus_mdp-0.1.10/src/kuhl_haus/mdp/models/constants.py +24 -0
  7. kuhl_haus_mdp-0.1.10/src/kuhl_haus/mdp/models/market_data_cache_ttl.py +30 -0
  8. {kuhl_haus_mdp-0.1.9 → kuhl_haus_mdp-0.1.10}/tests/analyzers/test_massive_data_analyzer.py +18 -18
  9. {kuhl_haus_mdp-0.1.9 → kuhl_haus_mdp-0.1.10}/tests/components/test_market_data_cache.py +6 -5
  10. kuhl_haus_mdp-0.1.9/src/kuhl_haus/mdp/models/market_data_cache_ttl.py +0 -20
  11. {kuhl_haus_mdp-0.1.9 → kuhl_haus_mdp-0.1.10}/LICENSE.txt +0 -0
  12. {kuhl_haus_mdp-0.1.9 → kuhl_haus_mdp-0.1.10}/README.md +0 -0
  13. {kuhl_haus_mdp-0.1.9 → kuhl_haus_mdp-0.1.10}/src/kuhl_haus/mdp/__init__.py +0 -0
  14. {kuhl_haus_mdp-0.1.9 → kuhl_haus_mdp-0.1.10}/src/kuhl_haus/mdp/analyzers/__init__.py +0 -0
  15. {kuhl_haus_mdp-0.1.9 → kuhl_haus_mdp-0.1.10}/src/kuhl_haus/mdp/analyzers/analyzer.py +0 -0
  16. {kuhl_haus_mdp-0.1.9 → kuhl_haus_mdp-0.1.10}/src/kuhl_haus/mdp/components/__init__.py +0 -0
  17. {kuhl_haus_mdp-0.1.9 → kuhl_haus_mdp-0.1.10}/src/kuhl_haus/mdp/components/market_data_scanner.py +0 -0
  18. {kuhl_haus_mdp-0.1.9 → kuhl_haus_mdp-0.1.10}/src/kuhl_haus/mdp/components/widget_data_service.py +0 -0
  19. {kuhl_haus_mdp-0.1.9 → kuhl_haus_mdp-0.1.10}/src/kuhl_haus/mdp/helpers/__init__.py +0 -0
  20. {kuhl_haus_mdp-0.1.9 → kuhl_haus_mdp-0.1.10}/src/kuhl_haus/mdp/helpers/process_manager.py +0 -0
  21. {kuhl_haus_mdp-0.1.9 → kuhl_haus_mdp-0.1.10}/src/kuhl_haus/mdp/helpers/queue_name_resolver.py +0 -0
  22. {kuhl_haus_mdp-0.1.9 → kuhl_haus_mdp-0.1.10}/src/kuhl_haus/mdp/helpers/utils.py +0 -0
  23. {kuhl_haus_mdp-0.1.9 → kuhl_haus_mdp-0.1.10}/src/kuhl_haus/mdp/integ/__init__.py +0 -0
  24. {kuhl_haus_mdp-0.1.9 → kuhl_haus_mdp-0.1.10}/src/kuhl_haus/mdp/integ/massive_data_listener.py +0 -0
  25. {kuhl_haus_mdp-0.1.9 → kuhl_haus_mdp-0.1.10}/src/kuhl_haus/mdp/integ/massive_data_processor.py +0 -0
  26. {kuhl_haus_mdp-0.1.9 → kuhl_haus_mdp-0.1.10}/src/kuhl_haus/mdp/integ/massive_data_queues.py +0 -0
  27. {kuhl_haus_mdp-0.1.9 → kuhl_haus_mdp-0.1.10}/src/kuhl_haus/mdp/integ/web_socket_message_serde.py +0 -0
  28. {kuhl_haus_mdp-0.1.9 → kuhl_haus_mdp-0.1.10}/src/kuhl_haus/mdp/models/__init__.py +0 -0
  29. {kuhl_haus_mdp-0.1.9 → kuhl_haus_mdp-0.1.10}/src/kuhl_haus/mdp/models/market_data_analyzer_result.py +0 -0
  30. {kuhl_haus_mdp-0.1.9 → kuhl_haus_mdp-0.1.10}/src/kuhl_haus/mdp/models/market_data_cache_keys.py +0 -0
  31. {kuhl_haus_mdp-0.1.9 → kuhl_haus_mdp-0.1.10}/src/kuhl_haus/mdp/models/market_data_pubsub_keys.py +0 -0
  32. {kuhl_haus_mdp-0.1.9 → kuhl_haus_mdp-0.1.10}/src/kuhl_haus/mdp/models/market_data_scanner_names.py +0 -0
  33. {kuhl_haus_mdp-0.1.9 → kuhl_haus_mdp-0.1.10}/src/kuhl_haus/mdp/models/massive_data_queue.py +0 -0
  34. {kuhl_haus_mdp-0.1.9 → kuhl_haus_mdp-0.1.10}/src/kuhl_haus/mdp/models/top_stocks_cache_item.py +0 -0
  35. {kuhl_haus_mdp-0.1.9 → kuhl_haus_mdp-0.1.10}/tests/__init__.py +0 -0
  36. {kuhl_haus_mdp-0.1.9 → kuhl_haus_mdp-0.1.10}/tests/analyzers/__init__.py +0 -0
  37. {kuhl_haus_mdp-0.1.9 → kuhl_haus_mdp-0.1.10}/tests/analyzers/test_top_stocks_rehydrate.py +0 -0
  38. {kuhl_haus_mdp-0.1.9 → kuhl_haus_mdp-0.1.10}/tests/components/__init__.py +0 -0
  39. {kuhl_haus_mdp-0.1.9 → kuhl_haus_mdp-0.1.10}/tests/components/test_market_data_scanner.py +0 -0
  40. {kuhl_haus_mdp-0.1.9 → kuhl_haus_mdp-0.1.10}/tests/components/test_widget_data_service.py +0 -0
  41. {kuhl_haus_mdp-0.1.9 → kuhl_haus_mdp-0.1.10}/tests/helpers/__init__.py +0 -0
  42. {kuhl_haus_mdp-0.1.9 → kuhl_haus_mdp-0.1.10}/tests/helpers/test_process_manager.py +0 -0
  43. {kuhl_haus_mdp-0.1.9 → kuhl_haus_mdp-0.1.10}/tests/helpers/test_queue_name_resolver.py +0 -0
  44. {kuhl_haus_mdp-0.1.9 → kuhl_haus_mdp-0.1.10}/tests/helpers/test_utils.py +0 -0
  45. {kuhl_haus_mdp-0.1.9 → kuhl_haus_mdp-0.1.10}/tests/integ/__init__.py +0 -0
  46. {kuhl_haus_mdp-0.1.9 → kuhl_haus_mdp-0.1.10}/tests/integ/test_web_socket_message_serde.py +0 -0
  47. {kuhl_haus_mdp-0.1.9 → kuhl_haus_mdp-0.1.10}/tests/models/__init__.py +0 -0
  48. {kuhl_haus_mdp-0.1.9 → kuhl_haus_mdp-0.1.10}/tests/models/test_top_stocks_cache_item.py +0 -0
{kuhl_haus_mdp-0.1.9 → kuhl_haus_mdp-0.1.10}/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: kuhl-haus-mdp
- Version: 0.1.9
+ Version: 0.1.10
  Summary: Market data processing pipeline for stock market scanner
  Author-Email: Tom Pounders <git@oldschool.engineer>
  License: The MIT License (MIT)
{kuhl_haus_mdp-0.1.9 → kuhl_haus_mdp-0.1.10}/pyproject.toml
@@ -30,7 +30,7 @@ dependencies = [
      "uvicorn[standard]",
      "websockets",
  ]
- version = "0.1.9"
+ version = "0.1.10"

  [project.license]
  file = "LICENSE.txt"
{kuhl_haus_mdp-0.1.9 → kuhl_haus_mdp-0.1.10}/src/kuhl_haus/mdp/analyzers/massive_data_analyzer.py
@@ -50,7 +50,7 @@ class MassiveDataAnalyzer:
      return [MarketDataAnalyzerResult(
          data=data,
          cache_key=f"{MarketDataCacheKeys.HALTS.value}:{symbol}",
-         cache_ttl=MarketDataCacheTTL.THREE_DAYS.value,
+         cache_ttl=MarketDataCacheTTL.HALTS.value,
          publish_key=f"{MarketDataCacheKeys.HALTS.value}:{symbol}",
      )]

@@ -58,8 +58,8 @@ class MassiveDataAnalyzer:
  def handle_equity_agg_event(data: dict, symbol: str) -> Optional[List[MarketDataAnalyzerResult]]:
      return [MarketDataAnalyzerResult(
          data=data,
-         cache_key=f"{MarketDataCacheKeys.AGGREGATE.value}:{symbol}",
-         cache_ttl=MarketDataCacheTTL.THREE_DAYS.value,
+         # cache_key=f"{MarketDataCacheKeys.AGGREGATE.value}:{symbol}",
+         # cache_ttl=MarketDataCacheTTL.AGGREGATE.value,
          publish_key=f"{MarketDataCacheKeys.AGGREGATE.value}:{symbol}",
      )]

@@ -67,8 +67,8 @@ class MassiveDataAnalyzer:
  def handle_equity_trade_event(data: dict, symbol: str) -> Optional[List[MarketDataAnalyzerResult]]:
      return [MarketDataAnalyzerResult(
          data=data,
-         cache_key=f"{MarketDataCacheKeys.TRADES.value}:{symbol}",
-         cache_ttl=MarketDataCacheTTL.EIGHT_HOURS.value,
+         # cache_key=f"{MarketDataCacheKeys.TRADES.value}:{symbol}",
+         # cache_ttl=MarketDataCacheTTL.TRADES.value,
          publish_key=f"{MarketDataCacheKeys.TRADES.value}:{symbol}",
      )]

@@ -76,8 +76,8 @@ class MassiveDataAnalyzer:
  def handle_equity_quote_event(data: dict, symbol: str) -> Optional[List[MarketDataAnalyzerResult]]:
      return [MarketDataAnalyzerResult(
          data=data,
-         cache_key=f"{MarketDataCacheKeys.QUOTES.value}:{symbol}",
-         cache_ttl=MarketDataCacheTTL.THREE_DAYS.value,
+         # cache_key=f"{MarketDataCacheKeys.QUOTES.value}:{symbol}",
+         # cache_ttl=MarketDataCacheTTL.QUOTES.value,
          publish_key=f"{MarketDataCacheKeys.QUOTES.value}:{symbol}",
      )]

@@ -88,6 +88,6 @@ class MassiveDataAnalyzer:
      return [MarketDataAnalyzerResult(
          data=data,
          cache_key=cache_key,
-         cache_ttl=MarketDataCacheTTL.ONE_DAY.value,
+         cache_ttl=MarketDataCacheTTL.UNKNOWN.value,
          publish_key=f"{MarketDataCacheKeys.UNKNOWN.value}",
      )]
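Note on the analyzer changes above: halt and unknown events keep their cache writes but now take purpose-named TTLs, while aggregate, trade, and quote results have their cache_key/cache_ttl arguments commented out, leaving them publish-only. A minimal sketch of the two resulting shapes, using only the keyword arguments shown in the diff; the helper names halt_result and trade_result are illustrative, not from the package:

    # Sketch (not package code): the two result shapes produced after this change.
    from kuhl_haus.mdp.models.market_data_analyzer_result import MarketDataAnalyzerResult
    from kuhl_haus.mdp.models.market_data_cache_keys import MarketDataCacheKeys
    from kuhl_haus.mdp.models.market_data_cache_ttl import MarketDataCacheTTL

    def halt_result(data: dict, symbol: str) -> MarketDataAnalyzerResult:
        # Halts are still cached, now with a purpose-named TTL (ONE_DAY in 0.1.10).
        return MarketDataAnalyzerResult(
            data=data,
            cache_key=f"{MarketDataCacheKeys.HALTS.value}:{symbol}",
            cache_ttl=MarketDataCacheTTL.HALTS.value,
            publish_key=f"{MarketDataCacheKeys.HALTS.value}:{symbol}",
        )

    def trade_result(data: dict, symbol: str) -> MarketDataAnalyzerResult:
        # Trades, quotes, and aggregates are now publish-only (no cache_key/cache_ttl).
        return MarketDataAnalyzerResult(
            data=data,
            publish_key=f"{MarketDataCacheKeys.TRADES.value}:{symbol}",
        )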
{kuhl_haus_mdp-0.1.9 → kuhl_haus_mdp-0.1.10}/src/kuhl_haus/mdp/analyzers/top_stocks.py
@@ -1,15 +1,9 @@
  import logging
  import time
- from datetime import datetime, timezone, timedelta
- from typing import Optional, List, Iterator
+ from datetime import datetime, timezone
+ from typing import Optional, List
  from zoneinfo import ZoneInfo

- from massive.exceptions import BadResponse
- from massive.rest import RESTClient
- from massive.rest.models import (
-     TickerSnapshot,
-     Agg,
- )
  from massive.websocket.models import (
      EquityAgg,
      EventType
@@ -19,6 +13,7 @@ from kuhl_haus.mdp.analyzers.analyzer import Analyzer
  from kuhl_haus.mdp.components.market_data_cache import MarketDataCache
  from kuhl_haus.mdp.models.market_data_analyzer_result import MarketDataAnalyzerResult
  from kuhl_haus.mdp.models.market_data_cache_keys import MarketDataCacheKeys
+ from kuhl_haus.mdp.models.market_data_cache_ttl import MarketDataCacheTTL
  from kuhl_haus.mdp.models.market_data_pubsub_keys import MarketDataPubSubKeys
  from kuhl_haus.mdp.models.top_stocks_cache_item import TopStocksCacheItem

@@ -94,24 +89,24 @@ class TopStocksAnalyzer(Analyzer):
      MarketDataAnalyzerResult(
          data=self.cache_item.to_dict(),
          cache_key=self.cache_key,
-         cache_ttl=28500,  # 7 hours, 55 minutes
+         cache_ttl=MarketDataCacheTTL.TOP_STOCKS_SCANNER.value,
      ),
      MarketDataAnalyzerResult(
          data=self.cache_item.top_volume(100),
          cache_key=MarketDataPubSubKeys.TOP_VOLUME_SCANNER.value,
-         cache_ttl=259200,  # 3 days
+         cache_ttl=MarketDataCacheTTL.TOP_VOLUME_SCANNER.value,
          publish_key=MarketDataPubSubKeys.TOP_VOLUME_SCANNER.value,
      ),
      MarketDataAnalyzerResult(
          data=self.cache_item.top_gainers(500),
          cache_key=MarketDataPubSubKeys.TOP_GAINERS_SCANNER.value,
-         cache_ttl=259200,  # 3 days
+         cache_ttl=MarketDataCacheTTL.TOP_GAINERS_SCANNER.value,
          publish_key=MarketDataPubSubKeys.TOP_GAINERS_SCANNER.value,
      ),
      MarketDataAnalyzerResult(
          data=self.cache_item.top_gappers(500),
          cache_key=MarketDataPubSubKeys.TOP_GAPPERS_SCANNER.value,
-         cache_ttl=259200,  # 3 days
+         cache_ttl=MarketDataCacheTTL.TOP_GAPPERS_SCANNER.value,
          publish_key=MarketDataPubSubKeys.TOP_GAPPERS_SCANNER.value,
      )
  ]
@@ -141,7 +136,8 @@
          prev_day_volume = snapshot.prev_day.volume
          prev_day_vwap = snapshot.prev_day.vwap
          break
-     except Exception:
+     except Exception as e:
+         self.logger.error(f"Failed to get snapshot for {event.symbol}: {e}")
      retry_count += 1
      if retry_count == max_tries and prev_day_close == 0:
          self.logger.error(f"Failed to get snapshot for {event.symbol} after {max_tries} tries.")
@@ -155,7 +151,8 @@
      try:
          avg_volume = await self.cache.get_avg_volume(event.symbol)
          break
-     except Exception:
+     except Exception as e:
+         self.logger.error(f"Failed to get average volume for {event.symbol}: {e}")
      retry_count += 1
      if retry_count == max_tries and avg_volume == 0:
          self.logger.error(f"Failed to get average volume for {event.symbol} after {max_tries} tries.")
@@ -169,7 +166,8 @@
      try:
          free_float = await self.cache.get_free_float(event.symbol)
          break
-     except Exception:
+     except Exception as e:
+         self.logger.error(f"Failed to get free float for {event.symbol}: {e}")
      retry_count += 1
      if retry_count == max_tries and free_float == 0:
          self.logger.error(f"Failed to get free float for {event.symbol} after {max_tries} tries.")
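Two things change in top_stocks.py: the scanner results take their TTLs from the new MarketDataCacheTTL members instead of magic numbers (TOP_STOCKS_SCANNER resolves to EIGHT_HOURS, 28800 s, slightly longer than the previous 28500 s), and the snapshot, average-volume, and free-float retry loops now log the caught exception before retrying. A rough sketch of that retry-and-log pattern, with illustrative names (fetch_with_retries, fetch) standing in for the real call sites inside TopStocksAnalyzer:

    # Sketch of the retry-and-log pattern; the real loops call self.cache / the snapshot client.
    import logging

    logger = logging.getLogger(__name__)

    async def fetch_with_retries(fetch, symbol: str, max_tries: int = 3):
        retry_count = 0
        value = 0
        while retry_count < max_tries:
            try:
                value = await fetch(symbol)
                break
            except Exception as e:
                # 0.1.10 logs the failure on every attempt instead of retrying silently.
                logger.error(f"Failed to fetch data for {symbol}: {e}")
                retry_count += 1
        if retry_count == max_tries and value == 0:
            logger.error(f"Failed to fetch data for {symbol} after {max_tries} tries.")
        return value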
{kuhl_haus_mdp-0.1.9 → kuhl_haus_mdp-0.1.10}/src/kuhl_haus/mdp/components/market_data_cache.py
@@ -61,7 +61,7 @@ class MarketDataCache:
      await self.cache_data(
          data=data,
          cache_key=cache_key,
-         cache_ttl=MarketDataCacheTTL.EIGHT_HOURS.value
+         cache_ttl=MarketDataCacheTTL.TICKER_SNAPSHOTS.value
      )
      return snapshot

@@ -109,13 +109,15 @@
          periods_calculated += 1
      else:
          break
+ if periods_calculated == 0:
+     raise Exception(f"No volume data returned for {ticker}")
  avg_volume = total_volume / periods_calculated

  self.logger.info(f"average volume {ticker}: {avg_volume}")
  await self.cache_data(
      data=avg_volume,
      cache_key=cache_key,
-     cache_ttl=MarketDataCacheTTL.TWELVE_HOURS.value
+     cache_ttl=MarketDataCacheTTL.TICKER_AVG_VOLUME.value
  )
  return avg_volume

@@ -162,7 +164,7 @@ class MarketDataCache:
  await self.cache_data(
      data=free_float,
      cache_key=cache_key,
-     cache_ttl=MarketDataCacheTTL.TWELVE_HOURS.value
+     cache_ttl=MarketDataCacheTTL.TICKER_FREE_FLOAT.value
  )
  return free_float
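Besides the TTL renames, the cache now guards the average-volume calculation: if the query yields no periods, it raises instead of dividing by zero. A simplified sketch of the guarded computation, where average_volume and bars are illustrative stand-ins for the method and the aggregate results it iterates over:

    # Illustrative sketch of the new zero-period guard in get_avg_volume.
    def average_volume(ticker: str, bars) -> float:
        total_volume = 0
        periods_calculated = 0
        for bar in bars:
            if bar.volume:
                total_volume += bar.volume
                periods_calculated += 1
            else:
                break
        if periods_calculated == 0:
            # Previously an empty result fell through to ZeroDivisionError here.
            raise Exception(f"No volume data returned for {ticker}")
        return total_volume / periods_calculated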
kuhl_haus_mdp-0.1.10/src/kuhl_haus/mdp/models/constants.py (new file)
@@ -0,0 +1,24 @@
+ # Minutes in seconds
+ ONE_MINUTE = 60
+ FIVE_MINUTES = 300
+ TEN_MINUTES = 600
+ FIFTEEN_MINUTES = 900
+ TWENTY_MINUTES = 1200
+ THIRTY_MINUTES = 1800
+
+ # Hours in seconds
+ ONE_HOUR = 3600
+ TWO_HOURS = 7200
+ FOUR_HOURS = 14400
+ SIX_HOURS = 21600
+ EIGHT_HOURS = 28800
+ TWELVE_HOURS = 43200
+
+ # Days in seconds
+ ONE_DAY = 86400
+ TWO_DAYS = 172800
+ THREE_DAYS = 259200
+ FOUR_DAYS = 345600
+ FIVE_DAYS = 432000
+ SIX_DAYS = 518400
+ SEVEN_DAYS = 604800
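constants.py is a new module of plain integer durations expressed in seconds, so a TTL like EIGHT_HOURS is simply 8 * 3600 = 28800. A quick sanity check of the values as a sketch:

    # Sanity check of the second-based constants added in 0.1.10.
    from kuhl_haus.mdp.models.constants import (
        FIVE_MINUTES, ONE_HOUR, EIGHT_HOURS, TWELVE_HOURS, ONE_DAY, THREE_DAYS,
    )

    assert FIVE_MINUTES == 5 * 60       # 300
    assert ONE_HOUR == 60 * 60          # 3600
    assert EIGHT_HOURS == 8 * 3600      # 28800
    assert TWELVE_HOURS == 12 * 3600    # 43200
    assert ONE_DAY == 24 * 3600         # 86400
    assert THREE_DAYS == 3 * 86400      # 259200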
kuhl_haus_mdp-0.1.10/src/kuhl_haus/mdp/models/market_data_cache_ttl.py (new file)
@@ -0,0 +1,30 @@
+ from enum import Enum
+ from kuhl_haus.mdp.models.constants import (
+     EIGHT_HOURS,
+     FIVE_MINUTES,
+     ONE_DAY,
+     ONE_HOUR,
+     THREE_DAYS,
+     TWELVE_HOURS,
+ )
+
+
+ class MarketDataCacheTTL(Enum):
+     # Raw market data caches
+     AGGREGATE = FIVE_MINUTES
+     HALTS = ONE_DAY
+     QUOTES = ONE_HOUR
+     TRADES = ONE_HOUR
+     UNKNOWN = ONE_DAY
+
+     # Ticker caches
+     TICKER_AVG_VOLUME = TWELVE_HOURS
+     TICKER_FREE_FLOAT = TWELVE_HOURS
+     TICKER_SNAPSHOTS = EIGHT_HOURS
+
+     # Scanner caches
+     TOP_STOCKS_SCANNER = EIGHT_HOURS
+     TOP_VOLUME_SCANNER = THREE_DAYS
+     TOP_GAINERS_SCANNER = THREE_DAYS
+     TOP_GAPPERS_SCANNER = THREE_DAYS
+
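The rewritten MarketDataCacheTTL enum keys TTLs by what is being cached rather than by duration, which is why the call sites above read e.g. MarketDataCacheTTL.HALTS.value. Some effective TTLs change relative to 0.1.9 in the process: halts drop from three days to one day, and the (now publish-only) trade and quote paths would map to one hour. Since several members share an underlying value (for example QUOTES and TRADES are both ONE_HOUR), Python's Enum treats the later ones as aliases of the first, which does not affect .value lookups. A small usage sketch:

    # Looking up a TTL by purpose; .value resolves to the underlying seconds.
    from kuhl_haus.mdp.models.market_data_cache_ttl import MarketDataCacheTTL

    print(MarketDataCacheTTL.HALTS.value)               # 86400 (ONE_DAY)
    print(MarketDataCacheTTL.TICKER_AVG_VOLUME.value)   # 43200 (TWELVE_HOURS)
    print(MarketDataCacheTTL.TOP_VOLUME_SCANNER.value)  # 259200 (THREE_DAYS)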
{kuhl_haus_mdp-0.1.9 → kuhl_haus_mdp-0.1.10}/tests/analyzers/test_massive_data_analyzer.py
@@ -49,8 +49,8 @@ def test_analyze_data_with_valid_luld_event_expect_valid_result(valid_symbol, va

  # Assert
  assert len(result) == 1
- assert result[0].cache_key == f"{MarketDataCacheKeys.HALTS.value}:{symbol}"
- assert result[0].cache_ttl == MarketDataCacheTTL.THREE_DAYS.value
+ # assert result[0].cache_key == f"{MarketDataCacheKeys.HALTS.value}:{symbol}"
+ # assert result[0].cache_ttl == MarketDataCacheTTL.HALTS.value
  assert result[0].publish_key == f"{MarketDataCacheKeys.HALTS.value}:{symbol}"
  assert result[0].data == data

@@ -64,8 +64,8 @@ def test_analyze_data_with_equity_agg_event_happy_path(valid_symbol, valid_equit

  # Assert
  assert len(result) == 1
- assert result[0].cache_key == f"{MarketDataCacheKeys.AGGREGATE.value}:{valid_symbol}"
- assert result[0].cache_ttl == MarketDataCacheTTL.THREE_DAYS.value
+ # assert result[0].cache_key == f"{MarketDataCacheKeys.AGGREGATE.value}:{valid_symbol}"
+ # assert result[0].cache_ttl == MarketDataCacheTTL.AGGREGATE.value
  assert result[0].publish_key == f"{MarketDataCacheKeys.AGGREGATE.value}:{valid_symbol}"
  assert result[0].data == valid_equity_agg_data

@@ -79,8 +79,8 @@ def test_analyze_data_with_equity_agg_min_event_happy_path(valid_symbol, valid_e

  # Assert
  assert len(result) == 1
- assert result[0].cache_key == f"{MarketDataCacheKeys.AGGREGATE.value}:{valid_symbol}"
- assert result[0].cache_ttl == MarketDataCacheTTL.THREE_DAYS.value
+ # assert result[0].cache_key == f"{MarketDataCacheKeys.AGGREGATE.value}:{valid_symbol}"
+ # assert result[0].cache_ttl == MarketDataCacheTTL.AGGREGATE.value
  assert result[0].publish_key == f"{MarketDataCacheKeys.AGGREGATE.value}:{valid_symbol}"
  assert result[0].data == valid_equity_agg_minute_data

@@ -94,8 +94,8 @@ def test_analyze_data_with_equity_trade_event_happy_path(valid_symbol, valid_equ

  # Assert
  assert len(result) == 1
- assert result[0].cache_key == f"{MarketDataCacheKeys.TRADES.value}:{valid_symbol}"
- assert result[0].cache_ttl == MarketDataCacheTTL.EIGHT_HOURS.value
+ # assert result[0].cache_key == f"{MarketDataCacheKeys.TRADES.value}:{valid_symbol}"
+ # assert result[0].cache_ttl == MarketDataCacheTTL.TRADES.value
  assert result[0].publish_key == f"{MarketDataCacheKeys.TRADES.value}:{valid_symbol}"
  assert result[0].data == valid_equity_trade_data

@@ -109,8 +109,8 @@ def test_analyze_data_equity_quote_event_happy_path(valid_symbol, valid_equity_q

  # Assert
  assert len(result) == 1
- assert result[0].cache_key == f"{MarketDataCacheKeys.QUOTES.value}:{valid_symbol}"
- assert result[0].cache_ttl == MarketDataCacheTTL.THREE_DAYS.value
+ # assert result[0].cache_key == f"{MarketDataCacheKeys.QUOTES.value}:{valid_symbol}"
+ # assert result[0].cache_ttl == MarketDataCacheTTL.QUOTES.value
  assert result[0].publish_key == f"{MarketDataCacheKeys.QUOTES.value}:{valid_symbol}"
  assert result[0].data == valid_equity_quote_data

@@ -167,7 +167,7 @@ def test_handle_luld_event_happy_path(valid_symbol, valid_luld_data):
  # Assert
  assert len(result) == 1
  assert result[0].cache_key == f"{MarketDataCacheKeys.HALTS.value}:{valid_symbol}"
- assert result[0].cache_ttl == MarketDataCacheTTL.THREE_DAYS.value
+ assert result[0].cache_ttl == MarketDataCacheTTL.HALTS.value
  assert result[0].publish_key == f"{MarketDataCacheKeys.HALTS.value}:{valid_symbol}"
  assert result[0].data == valid_luld_data

@@ -181,8 +181,8 @@ def test_handle_equity_agg_event_happy_path(valid_symbol, valid_equity_agg_data)

  # Assert
  assert len(result) == 1
- assert result[0].cache_key == f"{MarketDataCacheKeys.AGGREGATE.value}:{valid_symbol}"
- assert result[0].cache_ttl == MarketDataCacheTTL.THREE_DAYS.value
+ # assert result[0].cache_key == f"{MarketDataCacheKeys.AGGREGATE.value}:{valid_symbol}"
+ # assert result[0].cache_ttl == MarketDataCacheTTL.AGGREGATE.value
  assert result[0].publish_key == f"{MarketDataCacheKeys.AGGREGATE.value}:{valid_symbol}"
  assert result[0].data == valid_equity_agg_data

@@ -196,8 +196,8 @@ def test_handle_equity_trade_event_happy_path(valid_symbol, valid_equity_trade_d

  # Assert
  assert len(result) == 1
- assert result[0].cache_key == f"{MarketDataCacheKeys.TRADES.value}:{valid_symbol}"
- assert result[0].cache_ttl == MarketDataCacheTTL.EIGHT_HOURS.value
+ # assert result[0].cache_key == f"{MarketDataCacheKeys.TRADES.value}:{valid_symbol}"
+ # assert result[0].cache_ttl == MarketDataCacheTTL.TRADES.value
  assert result[0].publish_key == f"{MarketDataCacheKeys.TRADES.value}:{valid_symbol}"
  assert result[0].data == valid_equity_trade_data

@@ -211,8 +211,8 @@ def test_handle_equity_quote_event_happy_path(valid_symbol, valid_equity_quote_d

  # Assert
  assert len(result) == 1
- assert result[0].cache_key == f"{MarketDataCacheKeys.QUOTES.value}:{valid_symbol}"
- assert result[0].cache_ttl == MarketDataCacheTTL.THREE_DAYS.value
+ # assert result[0].cache_key == f"{MarketDataCacheKeys.QUOTES.value}:{valid_symbol}"
+ # assert result[0].cache_ttl == MarketDataCacheTTL.QUOTES.value
  assert result[0].publish_key == f"{MarketDataCacheKeys.QUOTES.value}:{valid_symbol}"
  assert result[0].data == valid_equity_quote_data

@@ -228,6 +228,6 @@ def test_handle_unknown_event_happy_path():
  # Assert
  assert len(result) == 1
  assert result[0].cache_key.startswith(f"{MarketDataCacheKeys.UNKNOWN.value}:")
- assert result[0].cache_ttl == MarketDataCacheTTL.ONE_DAY.value
+ assert result[0].cache_ttl == MarketDataCacheTTL.UNKNOWN.value
  assert result[0].publish_key == MarketDataCacheKeys.UNKNOWN.value
  assert result[0].data == data
{kuhl_haus_mdp-0.1.9 → kuhl_haus_mdp-0.1.10}/tests/components/test_market_data_cache.py
@@ -6,6 +6,7 @@ from kuhl_haus.mdp.components.market_data_cache import MarketDataCache
  from massive.rest.models import TickerSnapshot

  from kuhl_haus.mdp.models.market_data_cache_keys import MarketDataCacheKeys
+ from kuhl_haus.mdp.models.market_data_cache_ttl import MarketDataCacheTTL


  @pytest.fixture
@@ -289,10 +290,10 @@ async def test_get_avg_volume_caches_with_correct_ttl():
  # Assert
  mock_redis_client.get.assert_awaited_once_with(mock_cache_key)
  mock_rest_client.list_financials_ratios.assert_called_once_with(ticker="TEST")
- # Verify setex was called with the correct TTL (TWELVE_HOURS = 43200 seconds)
+ # Verify setex was called with the correct TTL
  call_args = mock_redis_client.setex.await_args
  assert call_args[0][0] == mock_cache_key
- assert call_args[0][1] == 43200  # MarketDataCacheTTL.TWELVE_HOURS.value
+ assert call_args[0][1] == MarketDataCacheTTL.TICKER_AVG_VOLUME.value
  assert result == mock_avg_volume


@@ -318,10 +319,10 @@ async def test_get_avg_volume_caches_with_correct_ttl():
  # Assert
  mock_redis_client.get.assert_awaited_once_with(mock_cache_key)
  mock_rest_client.list_financials_ratios.assert_called_once_with(ticker="TEST")
- # Verify setex was called with the correct TTL (TWELVE_HOURS = 43200 seconds)
+ # Verify setex was called with the correct TTL
  call_args = mock_redis_client.setex.await_args
  assert call_args[0][0] == mock_cache_key
- assert call_args[0][1] == 43200  # MarketDataCacheTTL.TWELVE_HOURS.value
+ assert call_args[0][1] == MarketDataCacheTTL.TICKER_AVG_VOLUME.value
  assert result == mock_avg_volume


@@ -445,7 +446,7 @@ async def test_get_free_float_caches_with_correct_ttl():
  # Verify setex was called with the correct TTL (TWELVE_HOURS = 43200 seconds)
  call_args = mock_redis_client.setex.await_args
  assert call_args[0][0] == mock_cache_key
- assert call_args[0][1] == 43200  # MarketDataCacheTTL.TWELVE_HOURS.value
+ assert call_args[0][1] == MarketDataCacheTTL.TICKER_FREE_FLOAT.value
  assert result == mock_free_float
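The cache tests now assert the TTL passed to Redis setex against the enum member rather than the literal 43200, so they stay in step if a TTL value changes. A self-contained sketch of that assertion style, assuming nothing beyond the enum shown above (mock_redis_client, mock_cache_key, and exercise are illustrative names, not the test fixtures):

    # Sketch: asserting a setex TTL against the enum instead of a hard-coded number.
    import asyncio
    from unittest.mock import AsyncMock

    from kuhl_haus.mdp.models.market_data_cache_ttl import MarketDataCacheTTL

    mock_redis_client = AsyncMock()
    mock_cache_key = "avg_volume:TEST"  # hypothetical key for illustration

    async def exercise():
        # Stand-in for the code under test writing to the cache.
        await mock_redis_client.setex(
            mock_cache_key, MarketDataCacheTTL.TICKER_AVG_VOLUME.value, 1_000_000
        )

    asyncio.run(exercise())

    call_args = mock_redis_client.setex.await_args
    assert call_args[0][0] == mock_cache_key
    assert call_args[0][1] == MarketDataCacheTTL.TICKER_AVG_VOLUME.value  # 43200, not a magic number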
kuhl_haus_mdp-0.1.9/src/kuhl_haus/mdp/models/market_data_cache_ttl.py (previous version, removed)
@@ -1,20 +0,0 @@
- from enum import Enum
-
-
- class MarketDataCacheTTL(Enum):
-     # Hours
-     ONE_HOUR = 3600
-     TWO_HOURS = 7200
-     FOUR_HOURS = 14400
-     SIX_HOURS = 21600
-     EIGHT_HOURS = 28800
-     TWELVE_HOURS = 43200
-
-     # Days
-     ONE_DAY = 86400
-     TWO_DAYS = 172800
-     THREE_DAYS = 259200
-     FOUR_DAYS = 345600
-     FIVE_DAYS = 432000
-     SIX_DAYS = 518400
-     SEVEN_DAYS = 604800