kuhl-haus-mdp 0.1.1__py3-none-any.whl → 0.1.3__py3-none-any.whl

This diff shows the contents of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the package versions as they appear in their respective public registries.
kuhl_haus/mdp/analyzers/massive_data_analyzer.py

@@ -5,6 +5,7 @@ from massive.websocket.models import EventType
 
 from kuhl_haus.mdp.models.market_data_analyzer_result import MarketDataAnalyzerResult
 from kuhl_haus.mdp.models.market_data_cache_keys import MarketDataCacheKeys
+from kuhl_haus.mdp.models.market_data_cache_ttl import MarketDataCacheTTL
 
 
 class MassiveDataAnalyzer:
@@ -18,7 +19,7 @@ class MassiveDataAnalyzer:
             EventType.EquityQuote.value: self.handle_equity_quote_event,
         }
 
-    async def analyze_data(self, data: dict) -> Optional[List[MarketDataAnalyzerResult]]:
+    def analyze_data(self, data: dict) -> Optional[List[MarketDataAnalyzerResult]]:
         """
         Process raw market data message
 
@@ -30,73 +31,63 @@ class MassiveDataAnalyzer:
         """
         if "event_type" not in data:
             self.logger.info("Message missing 'event_type'")
-            return await self.handle_unknown_event(data)
+            return self.handle_unknown_event(data)
         event_type = data.get("event_type")
 
         if "symbol" not in data:
             self.logger.info("Message missing 'symbol'")
-            return await self.handle_unknown_event(data)
+            return self.handle_unknown_event(data)
         symbol = data.get("symbol")
 
         if event_type in self.event_handlers:
-            return await self.event_handlers[event_type](**{"data": data, "symbol": symbol})
+            return self.event_handlers[event_type](**{"data": data, "symbol": symbol})
         else:
             self.logger.warning(f"Unsupported message type: {event_type}")
-            return await self.handle_unknown_event(data)
+            return self.handle_unknown_event(data)
 
-    async def handle_luld_event(self, data: dict, symbol: str) -> Optional[List[MarketDataAnalyzerResult]]:
-        try:
-            return [MarketDataAnalyzerResult(
-                data=data,
-                cache_key=f"{MarketDataCacheKeys.HALTS.value}:{symbol}",
-                cache_ttl=28500,  # 7 hours, 55 minutes
-                publish_key=f"{MarketDataCacheKeys.HALTS.value}:{symbol}",
-            )]
-        except Exception as e:
-            self.logger.error(f"Error processing LULD message for {symbol}: {data}", e)
+    @staticmethod
+    def handle_luld_event(data: dict, symbol: str) -> Optional[List[MarketDataAnalyzerResult]]:
+        return [MarketDataAnalyzerResult(
+            data=data,
+            cache_key=f"{MarketDataCacheKeys.HALTS.value}:{symbol}",
+            cache_ttl=MarketDataCacheTTL.THREE_DAYS.value,
+            publish_key=f"{MarketDataCacheKeys.HALTS.value}:{symbol}",
+        )]
 
-    async def handle_equity_agg_event(self, data: dict, symbol: str) -> Optional[List[MarketDataAnalyzerResult]]:
-        try:
-            return [MarketDataAnalyzerResult(
-                data=data,
-                # cache_key=f"{MarketDataCacheKeys.AGGREGATE.value}:{symbol}",
-                # cache_ttl=259200,  # 3 days
-                publish_key=f"{MarketDataCacheKeys.AGGREGATE.value}:{symbol}",
-            )]
-        except Exception as e:
-            self.logger.error(f"Error processing EquityAgg message for {symbol}: {data}", e)
+    @staticmethod
+    def handle_equity_agg_event(data: dict, symbol: str) -> Optional[List[MarketDataAnalyzerResult]]:
+        return [MarketDataAnalyzerResult(
+            data=data,
+            cache_key=f"{MarketDataCacheKeys.AGGREGATE.value}:{symbol}",
+            cache_ttl=MarketDataCacheTTL.THREE_DAYS.value,
+            publish_key=f"{MarketDataCacheKeys.AGGREGATE.value}:{symbol}",
+        )]
 
-    async def handle_equity_trade_event(self, data: dict, symbol: str) -> Optional[List[MarketDataAnalyzerResult]]:
-        try:
-            return [MarketDataAnalyzerResult(
-                data=data,
-                # cache_key=f"{MarketDataCacheKeys.TRADES.value}:{symbol}",
-                # cache_ttl=28500,  # 7 hours, 55 minutes
-                publish_key=f"{MarketDataCacheKeys.TRADES.value}:{symbol}",
-            )]
-        except Exception as e:
-            self.logger.error(f"Error processing EquityTrade message for {symbol}: {data}", e)
+    @staticmethod
+    def handle_equity_trade_event(data: dict, symbol: str) -> Optional[List[MarketDataAnalyzerResult]]:
+        return [MarketDataAnalyzerResult(
+            data=data,
+            cache_key=f"{MarketDataCacheKeys.TRADES.value}:{symbol}",
+            cache_ttl=MarketDataCacheTTL.EIGHT_HOURS.value,
+            publish_key=f"{MarketDataCacheKeys.TRADES.value}:{symbol}",
+        )]
 
-    async def handle_equity_quote_event(self, data: dict, symbol: str) -> Optional[List[MarketDataAnalyzerResult]]:
-        try:
-            return [MarketDataAnalyzerResult(
-                data=data,
-                # cache_key=f"{MarketDataCacheKeys.QUOTES.value}:{symbol}",
-                # cache_ttl=259200,  # 3 days
-                publish_key=f"{MarketDataCacheKeys.QUOTES.value}:{symbol}",
-            )]
-        except Exception as e:
-            self.logger.error(f"Error processing EquityQuote message for {symbol}: {data}", e)
+    @staticmethod
+    def handle_equity_quote_event(data: dict, symbol: str) -> Optional[List[MarketDataAnalyzerResult]]:
+        return [MarketDataAnalyzerResult(
+            data=data,
+            cache_key=f"{MarketDataCacheKeys.QUOTES.value}:{symbol}",
+            cache_ttl=MarketDataCacheTTL.THREE_DAYS.value,
+            publish_key=f"{MarketDataCacheKeys.QUOTES.value}:{symbol}",
+        )]
 
-    async def handle_unknown_event(self, data: dict) -> Optional[List[MarketDataAnalyzerResult]]:
-        try:
-            timestamp = f"{time()}".replace('.','')
-            cache_key = f"{MarketDataCacheKeys.UNKNOWN.value}:{timestamp}"
-            return [MarketDataAnalyzerResult(
-                data=data,
-                cache_key=cache_key,
-                cache_ttl=86400,  # 1 days
-                publish_key=f"{MarketDataCacheKeys.UNKNOWN.value}",
-            )]
-        except Exception as e:
-            self.logger.error(f"Error processing unknown message type: {data}", e)
+    @staticmethod
+    def handle_unknown_event(data: dict) -> Optional[List[MarketDataAnalyzerResult]]:
+        timestamp = f"{time()}".replace('.','')
+        cache_key = f"{MarketDataCacheKeys.UNKNOWN.value}:{timestamp}"
+        return [MarketDataAnalyzerResult(
+            data=data,
+            cache_key=cache_key,
+            cache_ttl=MarketDataCacheTTL.ONE_DAY.value,
+            publish_key=f"{MarketDataCacheKeys.UNKNOWN.value}",
+        )]
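
Side note on usage: with this release the event handlers are plain synchronous static methods, so they can be exercised without an event loop. A minimal sketch, with an illustrative payload and symbol (and assuming MarketDataAnalyzerResult exposes its constructor kwargs as attributes):

    from kuhl_haus.mdp.analyzers.massive_data_analyzer import MassiveDataAnalyzer

    # Illustrative message; handle_luld_event simply wraps it into a result object.
    sample = {"event_type": "LULD", "symbol": "AAPL"}

    # No await needed anymore: the handler is a plain @staticmethod.
    results = MassiveDataAnalyzer.handle_luld_event(data=sample, symbol="AAPL")
    for result in results:
        # Assumption: the result object exposes cache_key/cache_ttl/publish_key.
        print(result.cache_key, result.cache_ttl, result.publish_key)
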
kuhl_haus/mdp/analyzers/top_stocks.py

@@ -1,167 +1,25 @@
 import logging
 import time
-from collections import defaultdict
-from dataclasses import dataclass, field
 from datetime import datetime, timezone, timedelta
-from typing import Dict, Optional, List, Iterator
+from typing import Optional, List, Iterator
 from zoneinfo import ZoneInfo
 
+from massive.exceptions import BadResponse
 from massive.rest import RESTClient
 from massive.rest.models import (
     TickerSnapshot,
     Agg,
 )
 from massive.websocket.models import (
-    EquityTrade,
     EquityAgg,
     EventType
 )
-from massive.exceptions import BadResponse
 
 from kuhl_haus.mdp.analyzers.analyzer import Analyzer
 from kuhl_haus.mdp.models.market_data_analyzer_result import MarketDataAnalyzerResult
 from kuhl_haus.mdp.models.market_data_cache_keys import MarketDataCacheKeys
 from kuhl_haus.mdp.models.market_data_pubsub_keys import MarketDataPubSubKeys
-
-
-# docs
-# https://massive.com/docs/stocks/ws_stocks_am
-# https://massive.com/docs/websocket/stocks/trades
-
-@dataclass()
-class TopStocksCacheItem:
-    day_start_time: Optional[float] = 0.0
-
-    # Cached details for each ticker
-    symbol_data_cache: Optional[Dict[str, dict]] = field(default_factory=lambda: defaultdict(dict))
-
-    # Top Volume map
-    top_volume_map: Optional[Dict[str, float]] = field(default_factory=lambda: defaultdict(dict))
-
-    # Top Gappers map
-    top_gappers_map: Optional[Dict[str, float]] = field(default_factory=lambda: defaultdict(dict))
-
-    # Top Gainers map
-    top_gainers_map: Optional[Dict[str, float]] = field(default_factory=lambda: defaultdict(dict))
-
-    def to_dict(self):
-        ret = {
-            # Cache start time
-            "day_start_time": self.day_start_time,
-
-            # Maps
-            "symbol_data_cache": self.symbol_data_cache,
-            "top_volume_map": self.top_volume_map,
-            "top_gappers_map": self.top_gappers_map,
-            "top_gainers_map": self.top_gainers_map,
-        }
-        return ret
-
-    def top_volume(self, limit):
-        ret = []
-        for ticker, volume in sorted(self.top_volume_map.items(), key=lambda x: x[1], reverse=True)[
-            :limit
-        ]:
-            try:
-                ret.append({
-                    "symbol": ticker,
-                    "volume": self.symbol_data_cache[ticker]["volume"],
-                    "accumulated_volume": self.symbol_data_cache[ticker]["accumulated_volume"],
-                    "relative_volume": self.symbol_data_cache[ticker]["relative_volume"],
-                    "official_open_price": self.symbol_data_cache[ticker]["official_open_price"],
-                    "vwap": self.symbol_data_cache[ticker]["vwap"],
-                    "open": self.symbol_data_cache[ticker]["open"],
-                    "close": self.symbol_data_cache[ticker]["close"],
-                    "high": self.symbol_data_cache[ticker]["high"],
-                    "low": self.symbol_data_cache[ticker]["low"],
-                    "aggregate_vwap": self.symbol_data_cache[ticker]["aggregate_vwap"],
-                    "average_size": self.symbol_data_cache[ticker]["average_size"],
-                    "avg_volume": self.symbol_data_cache[ticker]["avg_volume"],
-                    "prev_day_close": self.symbol_data_cache[ticker]["prev_day_close"],
-                    "prev_day_volume": self.symbol_data_cache[ticker]["prev_day_volume"],
-                    "prev_day_vwap": self.symbol_data_cache[ticker]["prev_day_vwap"],
-                    "change": self.symbol_data_cache[ticker]["change"],
-                    "pct_change": self.symbol_data_cache[ticker]["pct_change"],
-                    "change_since_open": self.symbol_data_cache[ticker]["change_since_open"],
-                    "pct_change_since_open": self.symbol_data_cache[ticker]["pct_change_since_open"],
-                    "start_timestamp": self.symbol_data_cache[ticker]["start_timestamp"],
-                    "end_timestamp": self.symbol_data_cache[ticker]["end_timestamp"],
-                })
-            except KeyError:
-                del self.top_volume_map[ticker]
-        return ret
-
-    def top_gappers(self, limit):
-        ret = []
-        for ticker, pct_change in sorted(self.top_gappers_map.items(), key=lambda x: x[1], reverse=True)[
-            :limit
-        ]:
-            try:
-                if pct_change <= 0:
-                    break
-                ret.append({
-                    "symbol": ticker,
-                    "volume": self.symbol_data_cache[ticker]["volume"],
-                    "accumulated_volume": self.symbol_data_cache[ticker]["accumulated_volume"],
-                    "relative_volume": self.symbol_data_cache[ticker]["relative_volume"],
-                    "official_open_price": self.symbol_data_cache[ticker]["official_open_price"],
-                    "vwap": self.symbol_data_cache[ticker]["vwap"],
-                    "open": self.symbol_data_cache[ticker]["open"],
-                    "close": self.symbol_data_cache[ticker]["close"],
-                    "high": self.symbol_data_cache[ticker]["high"],
-                    "low": self.symbol_data_cache[ticker]["low"],
-                    "aggregate_vwap": self.symbol_data_cache[ticker]["aggregate_vwap"],
-                    "average_size": self.symbol_data_cache[ticker]["average_size"],
-                    "avg_volume": self.symbol_data_cache[ticker]["avg_volume"],
-                    "prev_day_close": self.symbol_data_cache[ticker]["prev_day_close"],
-                    "prev_day_volume": self.symbol_data_cache[ticker]["prev_day_volume"],
-                    "prev_day_vwap": self.symbol_data_cache[ticker]["prev_day_vwap"],
-                    "change": self.symbol_data_cache[ticker]["change"],
-                    "pct_change": self.symbol_data_cache[ticker]["pct_change"],
-                    "change_since_open": self.symbol_data_cache[ticker]["change_since_open"],
-                    "pct_change_since_open": self.symbol_data_cache[ticker]["pct_change_since_open"],
-                    "start_timestamp": self.symbol_data_cache[ticker]["start_timestamp"],
-                    "end_timestamp": self.symbol_data_cache[ticker]["end_timestamp"],
-                })
-            except KeyError:
-                del self.top_gappers_map[ticker]
-        return ret
-
-    def top_gainers(self, limit):
-        ret = []
-        for ticker, pct_change in sorted(self.top_gainers_map.items(), key=lambda x: x[1], reverse=True)[
-            :limit
-        ]:
-            try:
-                if pct_change <= 0:
-                    break
-                ret.append({
-                    "symbol": ticker,
-                    "volume": self.symbol_data_cache[ticker]["volume"],
-                    "accumulated_volume": self.symbol_data_cache[ticker]["accumulated_volume"],
-                    "relative_volume": self.symbol_data_cache[ticker]["relative_volume"],
-                    "official_open_price": self.symbol_data_cache[ticker]["official_open_price"],
-                    "vwap": self.symbol_data_cache[ticker]["vwap"],
-                    "open": self.symbol_data_cache[ticker]["open"],
-                    "close": self.symbol_data_cache[ticker]["close"],
-                    "high": self.symbol_data_cache[ticker]["high"],
-                    "low": self.symbol_data_cache[ticker]["low"],
-                    "aggregate_vwap": self.symbol_data_cache[ticker]["aggregate_vwap"],
-                    "average_size": self.symbol_data_cache[ticker]["average_size"],
-                    "avg_volume": self.symbol_data_cache[ticker]["avg_volume"],
-                    "prev_day_close": self.symbol_data_cache[ticker]["prev_day_close"],
-                    "prev_day_volume": self.symbol_data_cache[ticker]["prev_day_volume"],
-                    "prev_day_vwap": self.symbol_data_cache[ticker]["prev_day_vwap"],
-                    "change": self.symbol_data_cache[ticker]["change"],
-                    "pct_change": self.symbol_data_cache[ticker]["pct_change"],
-                    "change_since_open": self.symbol_data_cache[ticker]["change_since_open"],
-                    "pct_change_since_open": self.symbol_data_cache[ticker]["pct_change_since_open"],
-                    "start_timestamp": self.symbol_data_cache[ticker]["start_timestamp"],
-                    "end_timestamp": self.symbol_data_cache[ticker]["end_timestamp"],
-                })
-            except KeyError:
-                del self.top_gainers_map[ticker]
-        return ret
+from kuhl_haus.mdp.models.top_stocks_cache_item import TopStocksCacheItem
 
 
 class TopStocksAnalyzer(Analyzer):
@@ -233,12 +91,6 @@ class TopStocksAnalyzer:
         self.last_update_time = current_time
 
         result = [
-            # MarketDataAnalyzerResult(
-            #     data=data,
-            #     cache_key=f"{MarketDataCacheKeys.AGGREGATE.value}:{symbol}",
-            #     cache_ttl=86400,  # 1 day
-            #     # publish_key=f"{MarketDataCacheKeys.AGGREGATE.value}:{symbol}",
-            # ),
             MarketDataAnalyzerResult(
                 data=self.cache_item.to_dict(),
                 cache_key=self.cache_key,
kuhl_haus/mdp/integ/massive_data_processor.py

@@ -114,8 +114,8 @@ class MassiveDataProcessor:
         web_socket_message = json.loads(message.body.decode())
         data = WebSocketMessageSerde.to_dict(web_socket_message)
 
-        # Delegate to analyzer (async)
-        analyzer_results = await self.analyzer.analyze_data(data)
+        # Delegate to analyzer
+        analyzer_results = self.analyzer.analyze_data(data)
        if analyzer_results:
            self.processed += 1
            for analyzer_result in analyzer_results:
kuhl_haus/mdp/models/market_data_cache_ttl.py

@@ -0,0 +1,19 @@
+from enum import Enum
+
+
+class MarketDataCacheTTL(Enum):
+    # Hours
+    ONE_HOUR = 3600
+    TWO_HOURS = 7200
+    FOUR_HOURS = 14400
+    SIX_HOURS = 21600
+    EIGHT_HOURS = 28800
+
+    # Days
+    ONE_DAY = 86400
+    TWO_DAYS = 172800
+    THREE_DAYS = 259200
+    FOUR_DAYS = 345600
+    FIVE_DAYS = 432000
+    SIX_DAYS = 518400
+    SEVEN_DAYS = 604800
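
For reference, the enum members are plain integers (TTLs in seconds) and call sites read them via .value, as in the analyzer above. A quick sanity check, assuming nothing beyond the enum itself:

    from kuhl_haus.mdp.models.market_data_cache_ttl import MarketDataCacheTTL

    # Each member is a TTL in seconds; .value yields the integer expected by
    # cache APIs that take an expiry in seconds.
    assert MarketDataCacheTTL.EIGHT_HOURS.value == 8 * 60 * 60        # 28800
    assert MarketDataCacheTTL.THREE_DAYS.value == 3 * 24 * 60 * 60    # 259200
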
kuhl_haus/mdp/models/top_stocks_cache_item.py

@@ -0,0 +1,143 @@
+from collections import defaultdict
+from dataclasses import dataclass, field
+from typing import Dict, Optional
+
+
+# docs
+# https://massive.com/docs/stocks/ws_stocks_am
+# https://massive.com/docs/websocket/stocks/trades
+
+@dataclass()
+class TopStocksCacheItem:
+    day_start_time: Optional[float] = 0.0
+
+    # Cached details for each ticker
+    symbol_data_cache: Optional[Dict[str, dict]] = field(default_factory=lambda: defaultdict(dict))
+
+    # Top Volume map
+    top_volume_map: Optional[Dict[str, float]] = field(default_factory=lambda: defaultdict(dict))
+
+    # Top Gappers map
+    top_gappers_map: Optional[Dict[str, float]] = field(default_factory=lambda: defaultdict(dict))
+
+    # Top Gainers map
+    top_gainers_map: Optional[Dict[str, float]] = field(default_factory=lambda: defaultdict(dict))
+
+    def to_dict(self):
+        ret = {
+            # Cache start time
+            "day_start_time": self.day_start_time,
+
+            # Maps
+            "symbol_data_cache": self.symbol_data_cache,
+            "top_volume_map": self.top_volume_map,
+            "top_gappers_map": self.top_gappers_map,
+            "top_gainers_map": self.top_gainers_map,
+        }
+        return ret
+
+    def top_volume(self, limit):
+        ret = []
+        for ticker, volume in sorted(self.top_volume_map.items(), key=lambda x: x[1], reverse=True)[
+            :limit
+        ]:
+            try:
+                ret.append({
+                    "symbol": ticker,
+                    "volume": self.symbol_data_cache[ticker]["volume"],
+                    "accumulated_volume": self.symbol_data_cache[ticker]["accumulated_volume"],
+                    "relative_volume": self.symbol_data_cache[ticker]["relative_volume"],
+                    "official_open_price": self.symbol_data_cache[ticker]["official_open_price"],
+                    "vwap": self.symbol_data_cache[ticker]["vwap"],
+                    "open": self.symbol_data_cache[ticker]["open"],
+                    "close": self.symbol_data_cache[ticker]["close"],
+                    "high": self.symbol_data_cache[ticker]["high"],
+                    "low": self.symbol_data_cache[ticker]["low"],
+                    "aggregate_vwap": self.symbol_data_cache[ticker]["aggregate_vwap"],
+                    "average_size": self.symbol_data_cache[ticker]["average_size"],
+                    "avg_volume": self.symbol_data_cache[ticker]["avg_volume"],
+                    "prev_day_close": self.symbol_data_cache[ticker]["prev_day_close"],
+                    "prev_day_volume": self.symbol_data_cache[ticker]["prev_day_volume"],
+                    "prev_day_vwap": self.symbol_data_cache[ticker]["prev_day_vwap"],
+                    "change": self.symbol_data_cache[ticker]["change"],
+                    "pct_change": self.symbol_data_cache[ticker]["pct_change"],
+                    "change_since_open": self.symbol_data_cache[ticker]["change_since_open"],
+                    "pct_change_since_open": self.symbol_data_cache[ticker]["pct_change_since_open"],
+                    "start_timestamp": self.symbol_data_cache[ticker]["start_timestamp"],
+                    "end_timestamp": self.symbol_data_cache[ticker]["end_timestamp"],
+                })
+            except KeyError:
+                del self.top_volume_map[ticker]
+        return ret
+
+    def top_gappers(self, limit):
+        ret = []
+        for ticker, pct_change in sorted(self.top_gappers_map.items(), key=lambda x: x[1], reverse=True)[
+            :limit
+        ]:
+            try:
+                if pct_change <= 0:
+                    break
+                ret.append({
+                    "symbol": ticker,
+                    "volume": self.symbol_data_cache[ticker]["volume"],
+                    "accumulated_volume": self.symbol_data_cache[ticker]["accumulated_volume"],
+                    "relative_volume": self.symbol_data_cache[ticker]["relative_volume"],
+                    "official_open_price": self.symbol_data_cache[ticker]["official_open_price"],
+                    "vwap": self.symbol_data_cache[ticker]["vwap"],
+                    "open": self.symbol_data_cache[ticker]["open"],
+                    "close": self.symbol_data_cache[ticker]["close"],
+                    "high": self.symbol_data_cache[ticker]["high"],
+                    "low": self.symbol_data_cache[ticker]["low"],
+                    "aggregate_vwap": self.symbol_data_cache[ticker]["aggregate_vwap"],
+                    "average_size": self.symbol_data_cache[ticker]["average_size"],
+                    "avg_volume": self.symbol_data_cache[ticker]["avg_volume"],
+                    "prev_day_close": self.symbol_data_cache[ticker]["prev_day_close"],
+                    "prev_day_volume": self.symbol_data_cache[ticker]["prev_day_volume"],
+                    "prev_day_vwap": self.symbol_data_cache[ticker]["prev_day_vwap"],
+                    "change": self.symbol_data_cache[ticker]["change"],
+                    "pct_change": self.symbol_data_cache[ticker]["pct_change"],
+                    "change_since_open": self.symbol_data_cache[ticker]["change_since_open"],
+                    "pct_change_since_open": self.symbol_data_cache[ticker]["pct_change_since_open"],
+                    "start_timestamp": self.symbol_data_cache[ticker]["start_timestamp"],
+                    "end_timestamp": self.symbol_data_cache[ticker]["end_timestamp"],
+                })
+            except KeyError:
+                del self.top_gappers_map[ticker]
+        return ret
+
+    def top_gainers(self, limit):
+        ret = []
+        for ticker, pct_change in sorted(self.top_gainers_map.items(), key=lambda x: x[1], reverse=True)[
+            :limit
+        ]:
+            try:
+                if pct_change <= 0:
+                    break
+                ret.append({
+                    "symbol": ticker,
+                    "volume": self.symbol_data_cache[ticker]["volume"],
+                    "accumulated_volume": self.symbol_data_cache[ticker]["accumulated_volume"],
+                    "relative_volume": self.symbol_data_cache[ticker]["relative_volume"],
+                    "official_open_price": self.symbol_data_cache[ticker]["official_open_price"],
+                    "vwap": self.symbol_data_cache[ticker]["vwap"],
+                    "open": self.symbol_data_cache[ticker]["open"],
+                    "close": self.symbol_data_cache[ticker]["close"],
+                    "high": self.symbol_data_cache[ticker]["high"],
+                    "low": self.symbol_data_cache[ticker]["low"],
+                    "aggregate_vwap": self.symbol_data_cache[ticker]["aggregate_vwap"],
+                    "average_size": self.symbol_data_cache[ticker]["average_size"],
+                    "avg_volume": self.symbol_data_cache[ticker]["avg_volume"],
+                    "prev_day_close": self.symbol_data_cache[ticker]["prev_day_close"],
+                    "prev_day_volume": self.symbol_data_cache[ticker]["prev_day_volume"],
+                    "prev_day_vwap": self.symbol_data_cache[ticker]["prev_day_vwap"],
+                    "change": self.symbol_data_cache[ticker]["change"],
+                    "pct_change": self.symbol_data_cache[ticker]["pct_change"],
+                    "change_since_open": self.symbol_data_cache[ticker]["change_since_open"],
+                    "pct_change_since_open": self.symbol_data_cache[ticker]["pct_change_since_open"],
+                    "start_timestamp": self.symbol_data_cache[ticker]["start_timestamp"],
+                    "end_timestamp": self.symbol_data_cache[ticker]["end_timestamp"],
+                })
+            except KeyError:
+                del self.top_gainers_map[ticker]
+        return ret
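
The class itself is unchanged; it has simply moved out of top_stocks.py into its own module. A minimal usage sketch with made-up values, showing how top_volume() prunes tickers whose cached details are incomplete:

    import time

    from kuhl_haus.mdp.models.top_stocks_cache_item import TopStocksCacheItem

    item = TopStocksCacheItem(day_start_time=time.time())
    item.top_volume_map["AAPL"] = 1_250_000                  # illustrative volume
    item.symbol_data_cache["AAPL"] = {"volume": 1_250_000}   # deliberately incomplete

    # top_volume() skips tickers whose cached details are missing any expected
    # field (the KeyError handler also removes them from the map), so this
    # returns an empty list and drops "AAPL" from top_volume_map.
    print(item.top_volume(limit=10))
    assert "AAPL" not in item.top_volume_map
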
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: kuhl-haus-mdp
-Version: 0.1.1
+Version: 0.1.3
 Summary: Market data processing pipeline for stock market scanner
 Author-Email: Tom Pounders <git@oldschool.engineer>
 License: The MIT License (MIT)
@@ -1,8 +1,8 @@
 kuhl_haus/mdp/__init__.py,sha256=5dEpAdB3kypH8tCRECoXwbly1WV9kFU5kh8ldGSa0VI,349
 kuhl_haus/mdp/analyzers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 kuhl_haus/mdp/analyzers/analyzer.py,sha256=eluYM2Iib5kgbpNZUSk2qEUL-j83ZTb3zmEmRazrmiM,404
-kuhl_haus/mdp/analyzers/massive_data_analyzer.py,sha256=4EEKDJjJmxCdL5nFa87AogabBHgGVld02UuWTgqfFjI,4536
-kuhl_haus/mdp/analyzers/top_stocks.py,sha256=AbRnPHSVrJgUq3CDV8SaNstldqoimlI23gpG69lzYBM,18759
+kuhl_haus/mdp/analyzers/massive_data_analyzer.py,sha256=WSb7T8X4u2ue7Du7sf_fqxjgjEbR6ThllSNT1CncIM0,3866
+kuhl_haus/mdp/analyzers/top_stocks.py,sha256=nvNA-NkxMjVO0MqFuAvG-v3UdSP7iWDRGI7GxpPBzWw,10876
 kuhl_haus/mdp/components/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 kuhl_haus/mdp/components/market_data_cache.py,sha256=r5sJHuSuLiw9BVckW--aWZHHIMqOTCf-pFURA7kef3Q,1070
 kuhl_haus/mdp/components/market_data_scanner.py,sha256=vA0HPqVIvuZb93wzJhtER6fcH6bf85AgXCbu7yVFOFE,9152
@@ -12,18 +12,20 @@ kuhl_haus/mdp/helpers/process_manager.py,sha256=Is3Jx8nlBWvywQ1acdsdaSJTAG0olKsk
 kuhl_haus/mdp/helpers/queue_name_resolver.py,sha256=l_zfRLxrjR9uwRCV2VDO4vPWLK_lj5KVG2p4Lh8xWiw,770
 kuhl_haus/mdp/integ/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 kuhl_haus/mdp/integ/massive_data_listener.py,sha256=fPEYc6zZzHzFFjbP3zFInajKtEGInj8UQKKo3nKQEwQ,5098
-kuhl_haus/mdp/integ/massive_data_processor.py,sha256=9I0tH9sZNs9Y0TyKIBKix_qlEksEZt5NRfP-Zf3FovE,8708
+kuhl_haus/mdp/integ/massive_data_processor.py,sha256=qktzLfuqrOgE4C9iZs4mXFvHt2BckgevRP8pEakzggA,8694
 kuhl_haus/mdp/integ/massive_data_queues.py,sha256=zC_uV2vwZCMyVerDQ18RAQwIMMF75iK4qUSqwuWqgwc,5050
 kuhl_haus/mdp/integ/utils.py,sha256=9JEpl2yr2LghOLrJUDxi-4dtDK3DZ1wBTZ1uxBJsFbQ,1309
 kuhl_haus/mdp/integ/web_socket_message_serde.py,sha256=XdaoaByc7IhtzbPDXBtXKOTjyDzfPSDuZVCoHSIaTl4,5468
 kuhl_haus/mdp/models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 kuhl_haus/mdp/models/market_data_analyzer_result.py,sha256=iICb5GVCtuqARNbR1JNCAfbxMijM3uppDNdL8_FB3eI,422
 kuhl_haus/mdp/models/market_data_cache_keys.py,sha256=5iScBMhVQaG3p9P45veE-uRT7c6JY7k6j4DcvSEXENA,942
+kuhl_haus/mdp/models/market_data_cache_ttl.py,sha256=4KvsPeg84-sp4viUX6reN8CZYiM2aF9FgfXQmPbj3hw,348
 kuhl_haus/mdp/models/market_data_pubsub_keys.py,sha256=PEIPXK9jBehJB7G4pqoSuQZcfMZgOQq8Yho1itqv-1A,1306
 kuhl_haus/mdp/models/market_data_scanner_names.py,sha256=BYn1C0rYgGF1Sq583BkHADKUu-28ytNZQ-XgptuCH-Y,260
 kuhl_haus/mdp/models/massive_data_queue.py,sha256=MfYBcjVc4Fi61DWIvvhhWLUOiLmRpE9egtW-2KH6FTE,188
-kuhl_haus_mdp-0.1.1.dist-info/METADATA,sha256=Pwfg52rEA4A6MQdJtiUdF4BgL-U2NjSFPDCAYJYdaYE,8688
-kuhl_haus_mdp-0.1.1.dist-info/WHEEL,sha256=tsUv_t7BDeJeRHaSrczbGeuK-TtDpGsWi_JfpzD255I,90
-kuhl_haus_mdp-0.1.1.dist-info/entry_points.txt,sha256=6OYgBcLyFCUgeqLgnvMyOJxPCWzgy7se4rLPKtNonMs,34
-kuhl_haus_mdp-0.1.1.dist-info/licenses/LICENSE.txt,sha256=DRkJftAJcMqoTkQ_Y6-HtKj3nm4pZah_p8XBZiYnw-c,1079
-kuhl_haus_mdp-0.1.1.dist-info/RECORD,,
+kuhl_haus/mdp/models/top_stocks_cache_item.py,sha256=4vwwPTMkRRf1ct6iFInJnLSbBadM-tRk-zhqdD_ITE0,7676
+kuhl_haus_mdp-0.1.3.dist-info/METADATA,sha256=0SI4PelAQU2MlicnUa8LtzqrKLCKcNQxfONkjQldC3g,8688
+kuhl_haus_mdp-0.1.3.dist-info/WHEEL,sha256=tsUv_t7BDeJeRHaSrczbGeuK-TtDpGsWi_JfpzD255I,90
+kuhl_haus_mdp-0.1.3.dist-info/entry_points.txt,sha256=6OYgBcLyFCUgeqLgnvMyOJxPCWzgy7se4rLPKtNonMs,34
+kuhl_haus_mdp-0.1.3.dist-info/licenses/LICENSE.txt,sha256=DRkJftAJcMqoTkQ_Y6-HtKj3nm4pZah_p8XBZiYnw-c,1079
+kuhl_haus_mdp-0.1.3.dist-info/RECORD,,