kuhl-haus-mdp 0.1.2__py3-none-any.whl → 0.1.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,176 +1,35 @@
1
1
  import logging
2
2
  import time
3
- from collections import defaultdict
4
- from dataclasses import dataclass, field
5
3
  from datetime import datetime, timezone, timedelta
6
- from typing import Dict, Optional, List, Iterator
4
+ from typing import Optional, List, Iterator
7
5
  from zoneinfo import ZoneInfo
8
6
 
7
+ from massive.exceptions import BadResponse
9
8
  from massive.rest import RESTClient
10
9
  from massive.rest.models import (
11
10
  TickerSnapshot,
12
11
  Agg,
13
12
  )
14
13
  from massive.websocket.models import (
15
- EquityTrade,
16
14
  EquityAgg,
17
15
  EventType
18
16
  )
19
- from massive.exceptions import BadResponse
20
17
 
21
18
  from kuhl_haus.mdp.analyzers.analyzer import Analyzer
19
+ from kuhl_haus.mdp.components.market_data_cache import MarketDataCache
22
20
  from kuhl_haus.mdp.models.market_data_analyzer_result import MarketDataAnalyzerResult
23
21
  from kuhl_haus.mdp.models.market_data_cache_keys import MarketDataCacheKeys
24
22
  from kuhl_haus.mdp.models.market_data_pubsub_keys import MarketDataPubSubKeys
25
-
26
-
27
- # docs
28
- # https://massive.com/docs/stocks/ws_stocks_am
29
- # https://massive.com/docs/websocket/stocks/trades
30
-
31
- @dataclass()
32
- class TopStocksCacheItem:
33
- day_start_time: Optional[float] = 0.0
34
-
35
- # Cached details for each ticker
36
- symbol_data_cache: Optional[Dict[str, dict]] = field(default_factory=lambda: defaultdict(dict))
37
-
38
- # Top Volume map
39
- top_volume_map: Optional[Dict[str, float]] = field(default_factory=lambda: defaultdict(dict))
40
-
41
- # Top Gappers map
42
- top_gappers_map: Optional[Dict[str, float]] = field(default_factory=lambda: defaultdict(dict))
43
-
44
- # Top Gainers map
45
- top_gainers_map: Optional[Dict[str, float]] = field(default_factory=lambda: defaultdict(dict))
46
-
47
- def to_dict(self):
48
- ret = {
49
- # Cache start time
50
- "day_start_time": self.day_start_time,
51
-
52
- # Maps
53
- "symbol_data_cache": self.symbol_data_cache,
54
- "top_volume_map": self.top_volume_map,
55
- "top_gappers_map": self.top_gappers_map,
56
- "top_gainers_map": self.top_gainers_map,
57
- }
58
- return ret
59
-
60
- def top_volume(self, limit):
61
- ret = []
62
- for ticker, volume in sorted(self.top_volume_map.items(), key=lambda x: x[1], reverse=True)[
63
- :limit
64
- ]:
65
- try:
66
- ret.append({
67
- "symbol": ticker,
68
- "volume": self.symbol_data_cache[ticker]["volume"],
69
- "accumulated_volume": self.symbol_data_cache[ticker]["accumulated_volume"],
70
- "relative_volume": self.symbol_data_cache[ticker]["relative_volume"],
71
- "official_open_price": self.symbol_data_cache[ticker]["official_open_price"],
72
- "vwap": self.symbol_data_cache[ticker]["vwap"],
73
- "open": self.symbol_data_cache[ticker]["open"],
74
- "close": self.symbol_data_cache[ticker]["close"],
75
- "high": self.symbol_data_cache[ticker]["high"],
76
- "low": self.symbol_data_cache[ticker]["low"],
77
- "aggregate_vwap": self.symbol_data_cache[ticker]["aggregate_vwap"],
78
- "average_size": self.symbol_data_cache[ticker]["average_size"],
79
- "avg_volume": self.symbol_data_cache[ticker]["avg_volume"],
80
- "prev_day_close": self.symbol_data_cache[ticker]["prev_day_close"],
81
- "prev_day_volume": self.symbol_data_cache[ticker]["prev_day_volume"],
82
- "prev_day_vwap": self.symbol_data_cache[ticker]["prev_day_vwap"],
83
- "change": self.symbol_data_cache[ticker]["change"],
84
- "pct_change": self.symbol_data_cache[ticker]["pct_change"],
85
- "change_since_open": self.symbol_data_cache[ticker]["change_since_open"],
86
- "pct_change_since_open": self.symbol_data_cache[ticker]["pct_change_since_open"],
87
- "start_timestamp": self.symbol_data_cache[ticker]["start_timestamp"],
88
- "end_timestamp": self.symbol_data_cache[ticker]["end_timestamp"],
89
- })
90
- except KeyError:
91
- del self.top_volume_map[ticker]
92
- return ret
93
-
94
- def top_gappers(self, limit):
95
- ret = []
96
- for ticker, pct_change in sorted(self.top_gappers_map.items(), key=lambda x: x[1], reverse=True)[
97
- :limit
98
- ]:
99
- try:
100
- if pct_change <= 0:
101
- break
102
- ret.append({
103
- "symbol": ticker,
104
- "volume": self.symbol_data_cache[ticker]["volume"],
105
- "accumulated_volume": self.symbol_data_cache[ticker]["accumulated_volume"],
106
- "relative_volume": self.symbol_data_cache[ticker]["relative_volume"],
107
- "official_open_price": self.symbol_data_cache[ticker]["official_open_price"],
108
- "vwap": self.symbol_data_cache[ticker]["vwap"],
109
- "open": self.symbol_data_cache[ticker]["open"],
110
- "close": self.symbol_data_cache[ticker]["close"],
111
- "high": self.symbol_data_cache[ticker]["high"],
112
- "low": self.symbol_data_cache[ticker]["low"],
113
- "aggregate_vwap": self.symbol_data_cache[ticker]["aggregate_vwap"],
114
- "average_size": self.symbol_data_cache[ticker]["average_size"],
115
- "avg_volume": self.symbol_data_cache[ticker]["avg_volume"],
116
- "prev_day_close": self.symbol_data_cache[ticker]["prev_day_close"],
117
- "prev_day_volume": self.symbol_data_cache[ticker]["prev_day_volume"],
118
- "prev_day_vwap": self.symbol_data_cache[ticker]["prev_day_vwap"],
119
- "change": self.symbol_data_cache[ticker]["change"],
120
- "pct_change": self.symbol_data_cache[ticker]["pct_change"],
121
- "change_since_open": self.symbol_data_cache[ticker]["change_since_open"],
122
- "pct_change_since_open": self.symbol_data_cache[ticker]["pct_change_since_open"],
123
- "start_timestamp": self.symbol_data_cache[ticker]["start_timestamp"],
124
- "end_timestamp": self.symbol_data_cache[ticker]["end_timestamp"],
125
- })
126
- except KeyError:
127
- del self.top_gappers_map[ticker]
128
- return ret
129
-
130
- def top_gainers(self, limit):
131
- ret = []
132
- for ticker, pct_change in sorted(self.top_gainers_map.items(), key=lambda x: x[1], reverse=True)[
133
- :limit
134
- ]:
135
- try:
136
- if pct_change <= 0:
137
- break
138
- ret.append({
139
- "symbol": ticker,
140
- "volume": self.symbol_data_cache[ticker]["volume"],
141
- "accumulated_volume": self.symbol_data_cache[ticker]["accumulated_volume"],
142
- "relative_volume": self.symbol_data_cache[ticker]["relative_volume"],
143
- "official_open_price": self.symbol_data_cache[ticker]["official_open_price"],
144
- "vwap": self.symbol_data_cache[ticker]["vwap"],
145
- "open": self.symbol_data_cache[ticker]["open"],
146
- "close": self.symbol_data_cache[ticker]["close"],
147
- "high": self.symbol_data_cache[ticker]["high"],
148
- "low": self.symbol_data_cache[ticker]["low"],
149
- "aggregate_vwap": self.symbol_data_cache[ticker]["aggregate_vwap"],
150
- "average_size": self.symbol_data_cache[ticker]["average_size"],
151
- "avg_volume": self.symbol_data_cache[ticker]["avg_volume"],
152
- "prev_day_close": self.symbol_data_cache[ticker]["prev_day_close"],
153
- "prev_day_volume": self.symbol_data_cache[ticker]["prev_day_volume"],
154
- "prev_day_vwap": self.symbol_data_cache[ticker]["prev_day_vwap"],
155
- "change": self.symbol_data_cache[ticker]["change"],
156
- "pct_change": self.symbol_data_cache[ticker]["pct_change"],
157
- "change_since_open": self.symbol_data_cache[ticker]["change_since_open"],
158
- "pct_change_since_open": self.symbol_data_cache[ticker]["pct_change_since_open"],
159
- "start_timestamp": self.symbol_data_cache[ticker]["start_timestamp"],
160
- "end_timestamp": self.symbol_data_cache[ticker]["end_timestamp"],
161
- })
162
- except KeyError:
163
- del self.top_gainers_map[ticker]
164
- return ret
23
+ from kuhl_haus.mdp.models.top_stocks_cache_item import TopStocksCacheItem
165
24
 
166
25
 
167
26
  class TopStocksAnalyzer(Analyzer):
168
27
 
169
- def __init__(self, rest_client: RESTClient, **kwargs):
28
+ def __init__(self, cache: MarketDataCache, **kwargs):
170
29
  if "cache_key" not in kwargs:
171
30
  kwargs["cache_key"] = MarketDataCacheKeys.TOP_STOCKS_SCANNER.value
172
31
  super().__init__(**kwargs)
173
- self.rest_client = rest_client
32
+ self.cache = cache
174
33
  self.logger = logging.getLogger(__name__)
175
34
  self.cache_item = TopStocksCacheItem()
176
35
  self.last_update_time = 0
@@ -233,12 +92,6 @@ class TopStocksAnalyzer(Analyzer):
233
92
  self.last_update_time = current_time
234
93
 
235
94
  result = [
236
- # MarketDataAnalyzerResult(
237
- # data=data,
238
- # cache_key=f"{MarketDataCacheKeys.AGGREGATE.value}:{symbol}",
239
- # cache_ttl=86400, # 1 day
240
- # # publish_key=f"{MarketDataCacheKeys.AGGREGATE.value}:{symbol}",
241
- # ),
242
95
  MarketDataAnalyzerResult(
243
96
  data=self.cache_item.to_dict(),
244
97
  cache_key=self.cache_key,
@@ -283,7 +136,7 @@ class TopStocksAnalyzer(Analyzer):
283
136
  prev_day_vwap = 0
284
137
  while retry_count < max_tries:
285
138
  try:
286
- snapshot = await self.get_ticker_snapshot(event.symbol)
139
+ snapshot = await self.cache.get_ticker_snapshot(event.symbol)
287
140
  prev_day_close = snapshot.prev_day.close
288
141
  prev_day_volume = snapshot.prev_day.volume
289
142
  prev_day_vwap = snapshot.prev_day.vwap
@@ -301,7 +154,7 @@ class TopStocksAnalyzer(Analyzer):
301
154
  avg_volume = 0
302
155
  while retry_count < max_tries:
303
156
  try:
304
- avg_volume = await self.get_avg_volume(event.symbol)
157
+ avg_volume = await self.cache.get_avg_volume(event.symbol)
305
158
  break
306
159
  except (BadResponse, ZeroDivisionError) as e:
307
160
  self.logger.error(f"Error getting average volume for {event.symbol}: {repr(e)}", exc_info=e, stack_info=True)
@@ -365,44 +218,3 @@ class TopStocksAnalyzer(Analyzer):
365
218
  "start_timestamp": event.start_timestamp,
366
219
  "end_timestamp": event.end_timestamp,
367
220
  }
368
-
369
- async def get_ticker_snapshot(self, ticker: str) -> TickerSnapshot:
370
- self.logger.debug(f"Getting snapshot for {ticker}")
371
- result: TickerSnapshot = self.rest_client.get_snapshot_ticker(
372
- market_type="stocks",
373
- ticker=ticker
374
- )
375
- self.logger.debug(f"Snapshot result: {result}")
376
- return result
377
-
378
- async def get_avg_volume(self, ticker: str):
379
- self.logger.debug(f"Getting average volume for {ticker}")
380
- # Get date string in YYYY-MM-DD format
381
- end_date = datetime.now(timezone.utc).strftime("%Y-%m-%d")
382
- # Get date from 30 trading sessions ago in YYYY-MM-DD format
383
- start_date = (datetime.now(timezone.utc) - timedelta(days=42)).strftime("%Y-%m-%d")
384
-
385
- result: Iterator[Agg] = self.rest_client.list_aggs(
386
- ticker=ticker,
387
- multiplier=1,
388
- timespan="day",
389
- from_=start_date,
390
- to=end_date,
391
- adjusted=True,
392
- sort="desc"
393
- )
394
- self.logger.debug(f"average volume result: {result}")
395
-
396
- total_volume = 0
397
- max_periods = 30
398
- periods_calculated = 0
399
- for agg in result:
400
- if periods_calculated < max_periods:
401
- total_volume += agg.volume
402
- periods_calculated += 1
403
- else:
404
- break
405
- avg_volume = total_volume / periods_calculated
406
-
407
- self.logger.debug(f"average volume {ticker}: {avg_volume}")
408
- return avg_volume
@@ -1,14 +1,26 @@
1
1
  import json
2
2
  import logging
3
- from typing import Any, Optional
3
+ from typing import Any, Optional, Iterator, List
4
4
 
5
+ import aiohttp
5
6
  import redis.asyncio as aioredis
7
+ from massive.rest import RESTClient
8
+ from massive.rest.models import (
9
+ TickerSnapshot,
10
+ FinancialRatio,
11
+ )
12
+
13
+ from kuhl_haus.mdp.models.market_data_cache_keys import MarketDataCacheKeys
14
+ from kuhl_haus.mdp.models.market_data_cache_ttl import MarketDataCacheTTL
6
15
 
7
16
 
8
17
  class MarketDataCache:
9
- def __init__(self, redis_client: aioredis.Redis):
18
+ def __init__(self, rest_client: RESTClient, redis_client: aioredis.Redis, massive_api_key: str):
10
19
  self.logger = logging.getLogger(__name__)
20
+ self.rest_client = rest_client
21
+ self.massive_api_key = massive_api_key
11
22
  self.redis_client = redis_client
23
+ self.http_session = None
12
24
 
13
25
  async def get_cache(self, cache_key: str) -> Optional[dict]:
14
26
  """Fetch current value from Redis cache (for snapshot requests)."""
@@ -27,3 +39,105 @@ class MarketDataCache:
27
39
  async def publish_data(self, data: Any, publish_key: str = None):
28
40
  await self.redis_client.publish(publish_key, json.dumps(data))
29
41
  self.logger.debug(f"Published data for {publish_key}")
42
+
    async def get_ticker_snapshot(self, ticker: str) -> TickerSnapshot:
        """Return a TickerSnapshot for *ticker*, cache-first.

        Reads Redis via get_cache; on a miss, fetches the snapshot from the
        REST API and caches it for eight hours.
        """
        self.logger.debug(f"Getting snapshot for {ticker}")
        cache_key = f"{MarketDataCacheKeys.TICKER_SNAPSHOTS.value}:{ticker}"
        result = await self.get_cache(cache_key=cache_key)
        if result:
            # NOTE(review): from_dict(**result) spreads the cached dict into
            # keyword arguments; most from_dict APIs take the dict itself
            # (from_dict(result)) — confirm against the massive SDK signature.
            snapshot = TickerSnapshot.from_dict(**result)
        else:
            # NOTE(review): rest_client is synchronous, so this call blocks
            # the event loop while it runs — confirm whether it should be
            # wrapped in run_in_executor.
            snapshot: TickerSnapshot = self.rest_client.get_snapshot_ticker(
                market_type="stocks",
                ticker=ticker
            )
            self.logger.debug(f"Snapshot result: {snapshot}")
            # NOTE(review): this stores the TickerSnapshot object itself while
            # the cache-hit path reads back a dict — verify cache_data can
            # serialize the object (vs. storing a plain dict).
            await self.cache_data(
                data=snapshot,
                cache_key=cache_key,
                cache_ttl=MarketDataCacheTTL.EIGHT_HOURS.value
            )
        return snapshot
61
+
62
+ async def get_avg_volume(self, ticker: str):
63
+ self.logger.debug(f"Getting average volume for {ticker}")
64
+ cache_key = f"{MarketDataCacheKeys.TICKER_AVG_VOLUME.value}:{ticker}"
65
+ avg_volume = await self.get_cache(cache_key=cache_key)
66
+ if avg_volume:
67
+ self.logger.debug(f"Returning cached value for {ticker}: {avg_volume}")
68
+ return avg_volume
69
+
70
+ results: Iterator[FinancialRatio] = self.rest_client.list_financials_ratios(ticker=ticker)
71
+ ratios: List[FinancialRatio] = []
72
+ for financial_ratio in results:
73
+ ratios.append(financial_ratio)
74
+ if len(ratios) == 1:
75
+ avg_volume = ratios[0].average_volume
76
+ else:
77
+ raise Exception(f"Unexpected number of financial ratios for {ticker}: {len(ratios)}")
78
+
79
+ self.logger.debug(f"average volume {ticker}: {avg_volume}")
80
+ await self.cache_data(
81
+ data=avg_volume,
82
+ cache_key=cache_key,
83
+ cache_ttl=MarketDataCacheTTL.TWELVE_HOURS.value
84
+ )
85
+ return avg_volume
86
+
87
+ async def get_free_float(self, ticker: str):
88
+ self.logger.debug(f"Getting free float for {ticker}")
89
+ cache_key = f"{MarketDataCacheKeys.TICKER_FREE_FLOAT.value}:{ticker}"
90
+ free_float = await self.get_cache(cache_key=cache_key)
91
+ if free_float:
92
+ self.logger.debug(f"Returning cached value for {ticker}: {free_float}")
93
+ return free_float
94
+
95
+ # NOTE: This endpoint is experimental and the interface may change.
96
+ # https://massive.com/docs/rest/stocks/fundamentals/float
97
+ url = f"https://api.massive.com/stocks/vX/float"
98
+ params = {
99
+ "ticker": ticker,
100
+ "apiKey": self.massive_api_key
101
+ }
102
+
103
+ session = await self.get_http_session()
104
+ try:
105
+ async with session.get(url, params=params, timeout=aiohttp.ClientTimeout(total=10)) as response:
106
+ response.raise_for_status()
107
+ data = await response.json()
108
+
109
+ # Extract free_float from response
110
+ if data.get("status") == "OK" and data.get("results") is not None:
111
+ results = data["results"]
112
+ if len(results) > 0:
113
+ free_float = results[0].get("free_float")
114
+ else:
115
+ raise Exception(f"No free float data returned for {ticker}")
116
+ else:
117
+ raise Exception(f"Invalid response from Massive API for {ticker}: {data}")
118
+
119
+ except aiohttp.ClientError as e:
120
+ self.logger.error(f"HTTP error fetching free float for {ticker}: {e}")
121
+ raise
122
+ except Exception as e:
123
+ self.logger.error(f"Error fetching free float for {ticker}: {e}")
124
+ raise
125
+
126
+ self.logger.debug(f"free float {ticker}: {free_float}")
127
+ await self.cache_data(
128
+ data=free_float,
129
+ cache_key=cache_key,
130
+ cache_ttl=MarketDataCacheTTL.TWELVE_HOURS.value
131
+ )
132
+ return free_float
133
+
134
+ async def get_http_session(self) -> aiohttp.ClientSession:
135
+ """Get or create aiohttp session for async HTTP requests."""
136
+ if self.http_session is None or self.http_session.closed:
137
+ self.http_session = aiohttp.ClientSession()
138
+ return self.http_session
139
+
140
+ async def close(self):
141
+ """Close aiohttp session."""
142
+ if self.http_session and not self.http_session.closed:
143
+ await self.http_session.close()
@@ -1,17 +1,14 @@
1
1
  import asyncio
2
2
  import json
3
3
  import logging
4
- from typing import Dict
5
4
 
6
5
  import aio_pika
7
- import redis
8
6
  import redis.asyncio as aioredis
9
7
  from aio_pika.abc import AbstractIncomingMessage
10
- from aio_pika.exceptions import AMQPConnectionError
11
8
 
12
9
  from kuhl_haus.mdp.analyzers.massive_data_analyzer import MassiveDataAnalyzer
13
- from kuhl_haus.mdp.models.market_data_analyzer_result import MarketDataAnalyzerResult
14
10
  from kuhl_haus.mdp.integ.web_socket_message_serde import WebSocketMessageSerde
11
+ from kuhl_haus.mdp.models.market_data_analyzer_result import MarketDataAnalyzerResult
15
12
 
16
13
 
17
14
  class MassiveDataProcessor:
@@ -16,6 +16,9 @@ class MarketDataCacheKeys(Enum):
16
16
 
17
17
  # MARKET DATA CACHE
18
18
  DAILY_AGGREGATES = 'aggregate:daily'
19
+ TICKER_SNAPSHOTS = 'snapshots'
20
+ TICKER_AVG_VOLUME = 'avg_volume'
21
+ TICKER_FREE_FLOAT = 'free_float'
19
22
 
20
23
  # MARKET DATA PROCESSOR CACHE
21
24
  TOP_TRADES_SCANNER = f'cache:{MarketDataScannerNames.TOP_TRADES.value}'
@@ -8,6 +8,7 @@ class MarketDataCacheTTL(Enum):
8
8
  FOUR_HOURS = 14400
9
9
  SIX_HOURS = 21600
10
10
  EIGHT_HOURS = 28800
11
+ TWELVE_HOURS = 43200
11
12
 
12
13
  # Days
13
14
  ONE_DAY = 86400
@@ -0,0 +1,143 @@
"""Cache item model for the top-stocks scanner.

Tracks per-symbol aggregate details plus three ranked maps (volume,
gappers, gainers) used to build scanner result payloads.

Docs:
  https://massive.com/docs/stocks/ws_stocks_am
  https://massive.com/docs/websocket/stocks/trades
"""
from collections import defaultdict
from dataclasses import dataclass, field
from typing import Dict, Optional


# Fields copied from symbol_data_cache into each result row, in output order.
_ROW_FIELDS = (
    "volume",
    "accumulated_volume",
    "relative_volume",
    "official_open_price",
    "vwap",
    "open",
    "close",
    "high",
    "low",
    "aggregate_vwap",
    "average_size",
    "avg_volume",
    "prev_day_close",
    "prev_day_volume",
    "prev_day_vwap",
    "change",
    "pct_change",
    "change_since_open",
    "pct_change_since_open",
    "start_timestamp",
    "end_timestamp",
)


@dataclass()
class TopStocksCacheItem:
    # Epoch seconds marking the start of the current trading day's window.
    day_start_time: Optional[float] = 0.0

    # Cached details for each ticker (symbol -> field dict)
    symbol_data_cache: Optional[Dict[str, dict]] = field(default_factory=lambda: defaultdict(dict))

    # Top Volume map (symbol -> ranking score)
    top_volume_map: Optional[Dict[str, float]] = field(default_factory=lambda: defaultdict(dict))

    # Top Gappers map (symbol -> gap percent)
    top_gappers_map: Optional[Dict[str, float]] = field(default_factory=lambda: defaultdict(dict))

    # Top Gainers map (symbol -> percent change)
    top_gainers_map: Optional[Dict[str, float]] = field(default_factory=lambda: defaultdict(dict))

    def to_dict(self):
        """Return a JSON-serializable view of the cache item."""
        ret = {
            # Cache start time
            "day_start_time": self.day_start_time,

            # Maps
            "symbol_data_cache": self.symbol_data_cache,
            "top_volume_map": self.top_volume_map,
            "top_gappers_map": self.top_gappers_map,
            "top_gainers_map": self.top_gainers_map,
        }
        return ret

    def _build_row(self, ticker: str) -> dict:
        """Build one result row for *ticker* from symbol_data_cache.

        Raises KeyError when any expected field is missing; callers use
        that to evict stale tickers from their ranking maps.
        """
        data = self.symbol_data_cache[ticker]
        row = {"symbol": ticker}
        for field_name in _ROW_FIELDS:
            row[field_name] = data[field_name]
        return row

    def _ranked_rows(self, score_map: Dict[str, float], limit: int, positive_only: bool) -> list:
        """Return detail rows for the top *limit* tickers of *score_map*.

        Tickers are visited in descending score order. When
        *positive_only* is True, iteration stops at the first
        non-positive score. Tickers with incomplete cached data are
        removed from *score_map* and skipped.
        """
        rows = []
        # sorted() materializes a list, so deleting entries from score_map
        # while iterating is safe.
        for ticker, score in sorted(score_map.items(), key=lambda kv: kv[1], reverse=True)[:limit]:
            try:
                if positive_only and score <= 0:
                    break
                rows.append(self._build_row(ticker))
            except KeyError:
                # Incomplete data for this ticker; evict it from the ranking.
                del score_map[ticker]
        return rows

    def top_volume(self, limit):
        """Return detail rows for the *limit* highest-volume tickers."""
        return self._ranked_rows(self.top_volume_map, limit, positive_only=False)

    def top_gappers(self, limit):
        """Return detail rows for the *limit* largest positive gappers."""
        return self._ranked_rows(self.top_gappers_map, limit, positive_only=True)

    def top_gainers(self, limit):
        """Return detail rows for the *limit* largest positive gainers."""
        return self._ranked_rows(self.top_gainers_map, limit, positive_only=True)
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: kuhl-haus-mdp
3
- Version: 0.1.2
3
+ Version: 0.1.5
4
4
  Summary: Market data processing pipeline for stock market scanner
5
5
  Author-Email: Tom Pounders <git@oldschool.engineer>
6
6
  License: The MIT License (MIT)
@@ -33,20 +33,22 @@ Project-URL: Source, https://github.com/kuhl-haus/kuhl-haus-mdp.git
33
33
  Project-URL: Changelog, https://github.com/kuhl-haus/kuhl-haus-mdp/commits
34
34
  Project-URL: Tracker, https://github.com/kuhl-haus/kuhl-haus-mdp/issues
35
35
  Requires-Python: <3.13,>=3.9.21
36
- Requires-Dist: websockets
36
+ Requires-Dist: aiohttp
37
37
  Requires-Dist: aio-pika
38
- Requires-Dist: redis[asyncio]
39
- Requires-Dist: tenacity
40
38
  Requires-Dist: fastapi
41
- Requires-Dist: uvicorn[standard]
39
+ Requires-Dist: massive
42
40
  Requires-Dist: pydantic-settings
43
41
  Requires-Dist: python-dotenv
44
- Requires-Dist: massive
42
+ Requires-Dist: redis[asyncio]
43
+ Requires-Dist: tenacity
44
+ Requires-Dist: uvicorn[standard]
45
+ Requires-Dist: websockets
45
46
  Provides-Extra: testing
46
47
  Requires-Dist: setuptools; extra == "testing"
47
48
  Requires-Dist: pdm-backend; extra == "testing"
48
49
  Requires-Dist: pytest; extra == "testing"
49
50
  Requires-Dist: pytest-cov; extra == "testing"
51
+ Requires-Dist: pytest-asyncio; extra == "testing"
50
52
  Description-Content-Type: text/markdown
51
53
 
52
54
  <!-- These are examples of badges you might want to add to your README:
@@ -2,29 +2,30 @@ kuhl_haus/mdp/__init__.py,sha256=5dEpAdB3kypH8tCRECoXwbly1WV9kFU5kh8ldGSa0VI,349
2
2
  kuhl_haus/mdp/analyzers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
3
3
  kuhl_haus/mdp/analyzers/analyzer.py,sha256=eluYM2Iib5kgbpNZUSk2qEUL-j83ZTb3zmEmRazrmiM,404
4
4
  kuhl_haus/mdp/analyzers/massive_data_analyzer.py,sha256=WSb7T8X4u2ue7Du7sf_fqxjgjEbR6ThllSNT1CncIM0,3866
5
- kuhl_haus/mdp/analyzers/top_stocks.py,sha256=AbRnPHSVrJgUq3CDV8SaNstldqoimlI23gpG69lzYBM,18759
5
+ kuhl_haus/mdp/analyzers/top_stocks.py,sha256=aE8Z4PHkfudM02xLAgqgCMeIGqsioq18SQLs81Z8qvM,9452
6
6
  kuhl_haus/mdp/components/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
7
- kuhl_haus/mdp/components/market_data_cache.py,sha256=r5sJHuSuLiw9BVckW--aWZHHIMqOTCf-pFURA7kef3Q,1070
7
+ kuhl_haus/mdp/components/market_data_cache.py,sha256=EmRDlh_GTKyYDvAbbAPQrE8n91JKNFKH8myAM4UTPLM,5835
8
8
  kuhl_haus/mdp/components/market_data_scanner.py,sha256=vA0HPqVIvuZb93wzJhtER6fcH6bf85AgXCbu7yVFOFE,9152
9
9
  kuhl_haus/mdp/components/widget_data_service.py,sha256=ikygD9NRpidcXBEqft5Q11rHy_eUOwKGyOLEezo-Dd4,7439
10
10
  kuhl_haus/mdp/helpers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
11
11
  kuhl_haus/mdp/helpers/process_manager.py,sha256=Is3Jx8nlBWvywQ1acdsdaSJTAG0olKskpPvrRB4VMDE,9024
12
12
  kuhl_haus/mdp/helpers/queue_name_resolver.py,sha256=l_zfRLxrjR9uwRCV2VDO4vPWLK_lj5KVG2p4Lh8xWiw,770
13
+ kuhl_haus/mdp/helpers/utils.py,sha256=9JEpl2yr2LghOLrJUDxi-4dtDK3DZ1wBTZ1uxBJsFbQ,1309
13
14
  kuhl_haus/mdp/integ/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
14
15
  kuhl_haus/mdp/integ/massive_data_listener.py,sha256=fPEYc6zZzHzFFjbP3zFInajKtEGInj8UQKKo3nKQEwQ,5098
15
- kuhl_haus/mdp/integ/massive_data_processor.py,sha256=qktzLfuqrOgE4C9iZs4mXFvHt2BckgevRP8pEakzggA,8694
16
+ kuhl_haus/mdp/integ/massive_data_processor.py,sha256=H1WlbGtuSF45n7qLTLleuNlG-OlIXz4llJ7q3XRSS-s,8605
16
17
  kuhl_haus/mdp/integ/massive_data_queues.py,sha256=zC_uV2vwZCMyVerDQ18RAQwIMMF75iK4qUSqwuWqgwc,5050
17
- kuhl_haus/mdp/integ/utils.py,sha256=9JEpl2yr2LghOLrJUDxi-4dtDK3DZ1wBTZ1uxBJsFbQ,1309
18
18
  kuhl_haus/mdp/integ/web_socket_message_serde.py,sha256=XdaoaByc7IhtzbPDXBtXKOTjyDzfPSDuZVCoHSIaTl4,5468
19
19
  kuhl_haus/mdp/models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
20
20
  kuhl_haus/mdp/models/market_data_analyzer_result.py,sha256=iICb5GVCtuqARNbR1JNCAfbxMijM3uppDNdL8_FB3eI,422
21
- kuhl_haus/mdp/models/market_data_cache_keys.py,sha256=5iScBMhVQaG3p9P45veE-uRT7c6JY7k6j4DcvSEXENA,942
22
- kuhl_haus/mdp/models/market_data_cache_ttl.py,sha256=4KvsPeg84-sp4viUX6reN8CZYiM2aF9FgfXQmPbj3hw,348
21
+ kuhl_haus/mdp/models/market_data_cache_keys.py,sha256=04nFRdNZtvEeKFnpjZ6CNSu-4MiUgifPXPHGAZhZRsE,1051
22
+ kuhl_haus/mdp/models/market_data_cache_ttl.py,sha256=a43ys3S61Y0ADdb03ThgrRd9x7B1EsI6FplCjecdNLY,373
23
23
  kuhl_haus/mdp/models/market_data_pubsub_keys.py,sha256=PEIPXK9jBehJB7G4pqoSuQZcfMZgOQq8Yho1itqv-1A,1306
24
24
  kuhl_haus/mdp/models/market_data_scanner_names.py,sha256=BYn1C0rYgGF1Sq583BkHADKUu-28ytNZQ-XgptuCH-Y,260
25
25
  kuhl_haus/mdp/models/massive_data_queue.py,sha256=MfYBcjVc4Fi61DWIvvhhWLUOiLmRpE9egtW-2KH6FTE,188
26
- kuhl_haus_mdp-0.1.2.dist-info/METADATA,sha256=iOqk7u1RoyY-mBiIDGVI06TSDGulUuHo0GF9G6JYcdQ,8688
27
- kuhl_haus_mdp-0.1.2.dist-info/WHEEL,sha256=tsUv_t7BDeJeRHaSrczbGeuK-TtDpGsWi_JfpzD255I,90
28
- kuhl_haus_mdp-0.1.2.dist-info/entry_points.txt,sha256=6OYgBcLyFCUgeqLgnvMyOJxPCWzgy7se4rLPKtNonMs,34
29
- kuhl_haus_mdp-0.1.2.dist-info/licenses/LICENSE.txt,sha256=DRkJftAJcMqoTkQ_Y6-HtKj3nm4pZah_p8XBZiYnw-c,1079
30
- kuhl_haus_mdp-0.1.2.dist-info/RECORD,,
26
+ kuhl_haus/mdp/models/top_stocks_cache_item.py,sha256=4vwwPTMkRRf1ct6iFInJnLSbBadM-tRk-zhqdD_ITE0,7676
27
+ kuhl_haus_mdp-0.1.5.dist-info/METADATA,sha256=h3NksGfUJk-bpw4YWVtjcCrd8HxrxmDD4PxPn-MTg3k,8761
28
+ kuhl_haus_mdp-0.1.5.dist-info/WHEEL,sha256=tsUv_t7BDeJeRHaSrczbGeuK-TtDpGsWi_JfpzD255I,90
29
+ kuhl_haus_mdp-0.1.5.dist-info/entry_points.txt,sha256=6OYgBcLyFCUgeqLgnvMyOJxPCWzgy7se4rLPKtNonMs,34
30
+ kuhl_haus_mdp-0.1.5.dist-info/licenses/LICENSE.txt,sha256=DRkJftAJcMqoTkQ_Y6-HtKj3nm4pZah_p8XBZiYnw-c,1079
31
+ kuhl_haus_mdp-0.1.5.dist-info/RECORD,,
File without changes