kuhl-haus-mdp 0.1.3__py3-none-any.whl → 0.1.6__py3-none-any.whl

This diff shows the contents of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.

kuhl_haus/mdp/analyzers/analyzer.py

@@ -1,14 +1,15 @@
 from typing import Optional, List
 from kuhl_haus.mdp.models.market_data_analyzer_result import MarketDataAnalyzerResult
+from kuhl_haus.mdp.components.market_data_cache import MarketDataCache
 
 
 class Analyzer:
-    cache_key: str
+    cache: MarketDataCache
 
-    def __init__(self, cache_key: str, **kwargs):
-        self.cache_key = cache_key
+    def __init__(self, cache: MarketDataCache, **kwargs):
+        self.cache = cache
 
-    async def rehydrate(self, data: dict):
+    async def rehydrate(self):
         pass
 
     async def analyze_data(self, data: dict) -> Optional[List[MarketDataAnalyzerResult]]:
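
The base class now receives a `MarketDataCache` instead of a bare `cache_key`, and `rehydrate()` takes no arguments; each analyzer is expected to pull its own state through the injected cache. A minimal sketch of a subclass under the new contract (`ExampleAnalyzer` and its cache key are hypothetical, not part of the package):

```python
from typing import Optional, List

from kuhl_haus.mdp.analyzers.analyzer import Analyzer
from kuhl_haus.mdp.components.market_data_cache import MarketDataCache
from kuhl_haus.mdp.models.market_data_analyzer_result import MarketDataAnalyzerResult


class ExampleAnalyzer(Analyzer):
    """Hypothetical subclass illustrating the injected-cache contract."""

    def __init__(self, cache: MarketDataCache, **kwargs):
        super().__init__(cache=cache, **kwargs)
        self.cache_key = "cache:example"  # hypothetical key for this analyzer's state
        self.state: dict = {}

    async def rehydrate(self):
        # Pull previously cached state through the injected cache instead of
        # receiving it as an argument.
        data = await self.cache.get_cache(self.cache_key)
        self.state = data or {}

    async def analyze_data(self, data: dict) -> Optional[List[MarketDataAnalyzerResult]]:
        return None  # a real analyzer would emit results here
```
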
kuhl_haus/mdp/analyzers/top_stocks.py

@@ -16,6 +16,7 @@ from massive.websocket.models import (
 )
 
 from kuhl_haus.mdp.analyzers.analyzer import Analyzer
+from kuhl_haus.mdp.components.market_data_cache import MarketDataCache
 from kuhl_haus.mdp.models.market_data_analyzer_result import MarketDataAnalyzerResult
 from kuhl_haus.mdp.models.market_data_cache_keys import MarketDataCacheKeys
 from kuhl_haus.mdp.models.market_data_pubsub_keys import MarketDataPubSubKeys
@@ -24,22 +25,16 @@ from kuhl_haus.mdp.models.top_stocks_cache_item import TopStocksCacheItem
 
 class TopStocksAnalyzer(Analyzer):
 
-    def __init__(self, rest_client: RESTClient, **kwargs):
-        if "cache_key" not in kwargs:
-            kwargs["cache_key"] = MarketDataCacheKeys.TOP_STOCKS_SCANNER.value
-        super().__init__(**kwargs)
-        self.rest_client = rest_client
+    def __init__(self, cache: MarketDataCache, **kwargs):
+        super().__init__(cache=cache, **kwargs)
+        self.cache = cache
+        self.cache_key = MarketDataCacheKeys.TOP_STOCKS_SCANNER.value
         self.logger = logging.getLogger(__name__)
         self.cache_item = TopStocksCacheItem()
         self.last_update_time = 0
         self.pre_market_reset = False
 
-    async def rehydrate(self, data: dict):
-        if not data:
-            self.cache_item = TopStocksCacheItem()
-            self.logger.info("No data to rehydrate TopStocksCacheItem.")
-            return
-
+    async def rehydrate(self):
         # Get current time in UTC, then convert to Eastern Time
         utc_now = datetime.now(timezone.utc)
         et_now = utc_now.astimezone(ZoneInfo("America/New_York"))
@@ -51,6 +46,11 @@ class TopStocksAnalyzer(Analyzer):
             self.cache_item = TopStocksCacheItem()
             self.logger.info(f"Outside market hours ({et_now.strftime('%H:%M:%S %Z')}), clearing cache.")
             return
+        data = await self.cache.get_cache(self.cache_key)
+        if not data:
+            self.cache_item = TopStocksCacheItem()
+            self.logger.info("No data to rehydrate TopStocksCacheItem.")
+            return
         self.cache_item = TopStocksCacheItem(**data)
         self.logger.info("Rehydrated TopStocksCacheItem")
 
@@ -135,7 +135,7 @@ class TopStocksAnalyzer(Analyzer):
         prev_day_vwap = 0
         while retry_count < max_tries:
             try:
-                snapshot = await self.get_ticker_snapshot(event.symbol)
+                snapshot = await self.cache.get_ticker_snapshot(event.symbol)
                 prev_day_close = snapshot.prev_day.close
                 prev_day_volume = snapshot.prev_day.volume
                 prev_day_vwap = snapshot.prev_day.vwap
@@ -153,7 +153,7 @@ class TopStocksAnalyzer(Analyzer):
         avg_volume = 0
         while retry_count < max_tries:
             try:
-                avg_volume = await self.get_avg_volume(event.symbol)
+                avg_volume = await self.cache.get_avg_volume(event.symbol)
                 break
             except (BadResponse, ZeroDivisionError) as e:
                 self.logger.error(f"Error getting average volume for {event.symbol}: {repr(e)}", exc_info=e, stack_info=True)
@@ -217,44 +217,3 @@ class TopStocksAnalyzer(Analyzer):
             "start_timestamp": event.start_timestamp,
             "end_timestamp": event.end_timestamp,
         }
-
-    async def get_ticker_snapshot(self, ticker: str) -> TickerSnapshot:
-        self.logger.debug(f"Getting snapshot for {ticker}")
-        result: TickerSnapshot = self.rest_client.get_snapshot_ticker(
-            market_type="stocks",
-            ticker=ticker
-        )
-        self.logger.debug(f"Snapshot result: {result}")
-        return result
-
-    async def get_avg_volume(self, ticker: str):
-        self.logger.debug(f"Getting average volume for {ticker}")
-        # Get date string in YYYY-MM-DD format
-        end_date = datetime.now(timezone.utc).strftime("%Y-%m-%d")
-        # Get date from 30 trading sessions ago in YYYY-MM-DD format
-        start_date = (datetime.now(timezone.utc) - timedelta(days=42)).strftime("%Y-%m-%d")
-
-        result: Iterator[Agg] = self.rest_client.list_aggs(
-            ticker=ticker,
-            multiplier=1,
-            timespan="day",
-            from_=start_date,
-            to=end_date,
-            adjusted=True,
-            sort="desc"
-        )
-        self.logger.debug(f"average volume result: {result}")
-
-        total_volume = 0
-        max_periods = 30
-        periods_calculated = 0
-        for agg in result:
-            if periods_calculated < max_periods:
-                total_volume += agg.volume
-                periods_calculated += 1
-            else:
-                break
-        avg_volume = total_volume / periods_calculated
-
-        self.logger.debug(f"average volume {ticker}: {avg_volume}")
-        return avg_volume
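
Both snapshot and average-volume lookups now route through `self.cache`, while the surrounding retry loops (`while retry_count < max_tries`) stay in the analyzer. A generic sketch of that retry shape, with hypothetical names (`fetch_with_retries` is not in the package; the real code catches `BadResponse` and `ZeroDivisionError` specifically):

```python
import asyncio
import logging

logger = logging.getLogger(__name__)


async def fetch_with_retries(make_call, max_tries: int = 3, delay_s: float = 0.5):
    """Retry an async call, mirroring the analyzer's while-loop pattern."""
    retry_count = 0
    while retry_count < max_tries:
        try:
            return await make_call()
        except Exception as e:  # the analyzer narrows this to specific errors
            retry_count += 1
            logger.error(f"Attempt {retry_count} failed: {e!r}")
            await asyncio.sleep(delay_s)
    raise RuntimeError(f"Giving up after {max_tries} attempts")


# Usage, assuming `cache` is a MarketDataCache and `symbol` a ticker string:
# snapshot = await fetch_with_retries(lambda: cache.get_ticker_snapshot(symbol))
```
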
kuhl_haus/mdp/components/market_data_cache.py

@@ -1,14 +1,26 @@
 import json
 import logging
-from typing import Any, Optional
+from typing import Any, Optional, Iterator, List
 
+import aiohttp
 import redis.asyncio as aioredis
+from massive.rest import RESTClient
+from massive.rest.models import (
+    TickerSnapshot,
+    FinancialRatio,
+)
+
+from kuhl_haus.mdp.models.market_data_cache_keys import MarketDataCacheKeys
+from kuhl_haus.mdp.models.market_data_cache_ttl import MarketDataCacheTTL
 
 
 class MarketDataCache:
-    def __init__(self, redis_client: aioredis.Redis):
+    def __init__(self, rest_client: RESTClient, redis_client: aioredis.Redis, massive_api_key: str):
         self.logger = logging.getLogger(__name__)
+        self.rest_client = rest_client
+        self.massive_api_key = massive_api_key
         self.redis_client = redis_client
+        self.http_session = None
 
     async def get_cache(self, cache_key: str) -> Optional[dict]:
         """Fetch current value from Redis cache (for snapshot requests)."""
@@ -27,3 +39,105 @@ class MarketDataCache:
     async def publish_data(self, data: Any, publish_key: str = None):
         await self.redis_client.publish(publish_key, json.dumps(data))
         self.logger.debug(f"Published data for {publish_key}")
+
+    async def get_ticker_snapshot(self, ticker: str) -> TickerSnapshot:
+        self.logger.debug(f"Getting snapshot for {ticker}")
+        cache_key = f"{MarketDataCacheKeys.TICKER_SNAPSHOTS.value}:{ticker}"
+        result = await self.get_cache(cache_key=cache_key)
+        if result:
+            snapshot = TickerSnapshot.from_dict(**result)
+        else:
+            snapshot: TickerSnapshot = self.rest_client.get_snapshot_ticker(
+                market_type="stocks",
+                ticker=ticker
+            )
+            self.logger.debug(f"Snapshot result: {snapshot}")
+            await self.cache_data(
+                data=snapshot,
+                cache_key=cache_key,
+                cache_ttl=MarketDataCacheTTL.EIGHT_HOURS.value
+            )
+        return snapshot
+
+    async def get_avg_volume(self, ticker: str):
+        self.logger.debug(f"Getting average volume for {ticker}")
+        cache_key = f"{MarketDataCacheKeys.TICKER_AVG_VOLUME.value}:{ticker}"
+        avg_volume = await self.get_cache(cache_key=cache_key)
+        if avg_volume:
+            self.logger.debug(f"Returning cached value for {ticker}: {avg_volume}")
+            return avg_volume
+
+        results: Iterator[FinancialRatio] = self.rest_client.list_financials_ratios(ticker=ticker)
+        ratios: List[FinancialRatio] = []
+        for financial_ratio in results:
+            ratios.append(financial_ratio)
+        if len(ratios) == 1:
+            avg_volume = ratios[0].average_volume
+        else:
+            raise Exception(f"Unexpected number of financial ratios for {ticker}: {len(ratios)}")
+
+        self.logger.debug(f"average volume {ticker}: {avg_volume}")
+        await self.cache_data(
+            data=avg_volume,
+            cache_key=cache_key,
+            cache_ttl=MarketDataCacheTTL.TWELVE_HOURS.value
+        )
+        return avg_volume
+
+    async def get_free_float(self, ticker: str):
+        self.logger.debug(f"Getting free float for {ticker}")
+        cache_key = f"{MarketDataCacheKeys.TICKER_FREE_FLOAT.value}:{ticker}"
+        free_float = await self.get_cache(cache_key=cache_key)
+        if free_float:
+            self.logger.debug(f"Returning cached value for {ticker}: {free_float}")
+            return free_float
+
+        # NOTE: This endpoint is experimental and the interface may change.
+        # https://massive.com/docs/rest/stocks/fundamentals/float
+        url = f"https://api.massive.com/stocks/vX/float"
+        params = {
+            "ticker": ticker,
+            "apiKey": self.massive_api_key
+        }
+
+        session = await self.get_http_session()
+        try:
+            async with session.get(url, params=params, timeout=aiohttp.ClientTimeout(total=10)) as response:
+                response.raise_for_status()
+                data = await response.json()
+
+                # Extract free_float from response
+                if data.get("status") == "OK" and data.get("results") is not None:
+                    results = data["results"]
+                    if len(results) > 0:
+                        free_float = results[0].get("free_float")
+                    else:
+                        raise Exception(f"No free float data returned for {ticker}")
+                else:
+                    raise Exception(f"Invalid response from Massive API for {ticker}: {data}")
+
+        except aiohttp.ClientError as e:
+            self.logger.error(f"HTTP error fetching free float for {ticker}: {e}")
+            raise
+        except Exception as e:
+            self.logger.error(f"Error fetching free float for {ticker}: {e}")
+            raise
+
+        self.logger.debug(f"free float {ticker}: {free_float}")
+        await self.cache_data(
+            data=free_float,
+            cache_key=cache_key,
+            cache_ttl=MarketDataCacheTTL.TWELVE_HOURS.value
+        )
+        return free_float
+
+    async def get_http_session(self) -> aiohttp.ClientSession:
+        """Get or create aiohttp session for async HTTP requests."""
+        if self.http_session is None or self.http_session.closed:
+            self.http_session = aiohttp.ClientSession()
+        return self.http_session
+
+    async def close(self):
+        """Close aiohttp session."""
+        if self.http_session and not self.http_session.closed:
+            await self.http_session.close()
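
Every new method follows the same cache-aside pattern: check Redis under a prefixed key, fall back to the upstream API on a miss, then write the result back with a TTL (eight hours for snapshots, twelve for average volume and free float). A standalone usage sketch, assuming a local Redis and a `MASSIVE_API_KEY` environment variable (both placeholders):

```python
import asyncio
import os

import redis.asyncio as aioredis
from massive.rest import RESTClient

from kuhl_haus.mdp.components.market_data_cache import MarketDataCache


async def main():
    api_key = os.environ["MASSIVE_API_KEY"]  # placeholder env var
    cache = MarketDataCache(
        rest_client=RESTClient(api_key=api_key),
        redis_client=aioredis.from_url("redis://localhost:6379"),  # placeholder URL
        massive_api_key=api_key,
    )
    try:
        # First call hits the Massive API and caches the result for 8 hours;
        # repeat calls within the TTL are served from Redis.
        snapshot = await cache.get_ticker_snapshot("AAPL")
        print(snapshot)
    finally:
        await cache.close()  # release the lazily created aiohttp session


asyncio.run(main())
```
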
kuhl_haus/mdp/components/market_data_scanner.py

@@ -1,13 +1,16 @@
 import asyncio
 import json
 import logging
-from typing import Union, Optional, List
+from typing import Any, Union, Optional, List
 
 import redis.asyncio as aioredis
 from redis.exceptions import ConnectionError
 
+from massive.rest import RESTClient
+
 from kuhl_haus.mdp.analyzers.analyzer import Analyzer
 from kuhl_haus.mdp.models.market_data_analyzer_result import MarketDataAnalyzerResult
+from kuhl_haus.mdp.components.market_data_cache import MarketDataCache
 
 
 class MarketDataScanner:
@@ -18,11 +21,14 @@ class MarketDataScanner:
     error: int
     restarts: int
 
-    def __init__(self, redis_url: str, analyzer: Analyzer, subscriptions: List[str]):
+    def __init__(self, redis_url: str, massive_api_key: str, subscriptions: List[str], analyzer_class: Any):
         self.redis_url = redis_url
-        self.analyzer = analyzer
+        self.massive_api_key = massive_api_key
         self.logger = logging.getLogger(__name__)
 
+        self.analyzer: Analyzer = None
+        self.analyzer_class = analyzer_class
+
         # Connection objects
         self.redis_client = None  # : aioredis.Redis = None
         self.pubsub_client: Optional[aioredis.client.PubSub] = None
@@ -30,6 +36,7 @@ class MarketDataScanner:
         # State
         self.mdc_connected = False
         self.running = False
+        self.mdc: Optional[MarketDataCache] = None
 
         self.subscriptions: List[str] = subscriptions
         self._pubsub_task: Union[asyncio.Task, None] = None
@@ -48,9 +55,9 @@ class MarketDataScanner:
         await self.connect()
         self.pubsub_client = self.redis_client.pubsub()
 
-        scanner_cache = await self.get_cache(self.analyzer.cache_key)
+        self.analyzer = self.analyzer_class(cache=self.mdc)
         self.logger.info(f"mds rehydrating from cache")
-        await self.analyzer.rehydrate(scanner_cache)
+        await self.analyzer.rehydrate()
         self.logger.info("mds rehydration complete")
 
         for subscription in self.subscriptions:
@@ -73,6 +80,10 @@ class MarketDataScanner:
                 pass
             self._pubsub_task = None
 
+        if self.mdc:
+            await self.mdc.close()
+            self.mdc = None
+
         if self.pubsub_client:
             for subscription in self.subscriptions:
                 if subscription.endswith("*"):
@@ -104,6 +115,11 @@ class MarketDataScanner:
 
             # Test Redis connection
            await self.redis_client.ping()
+            self.mdc = MarketDataCache(
+                rest_client=RESTClient(api_key=self.massive_api_key),
+                redis_client=self.redis_client,
+                massive_api_key=self.massive_api_key
+            )
             self.mdc_connected = True
             self.logger.debug(f"Connected to Redis: {self.redis_url}")
         except Exception as e:
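
The scanner now accepts an analyzer class rather than an instance, defers construction until `connect()` has built the shared `MarketDataCache`, and closes that cache on shutdown. A wiring sketch (the Redis URL, channel pattern, and environment variable are placeholders):

```python
import os

from kuhl_haus.mdp.analyzers.top_stocks import TopStocksAnalyzer
from kuhl_haus.mdp.components.market_data_scanner import MarketDataScanner

scanner = MarketDataScanner(
    redis_url="redis://localhost:6379",              # placeholder URL
    massive_api_key=os.environ["MASSIVE_API_KEY"],   # placeholder env var
    subscriptions=["scanner:top_stocks:*"],          # placeholder channel pattern
    analyzer_class=TopStocksAnalyzer,                # a class, not an instance
)
# Once connected, the scanner builds a MarketDataCache and instantiates
# TopStocksAnalyzer(cache=...) itself before rehydrating from Redis.
```
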
kuhl_haus/mdp/integ/massive_data_processor.py

@@ -1,17 +1,14 @@
 import asyncio
 import json
 import logging
-from typing import Dict
 
 import aio_pika
-import redis
 import redis.asyncio as aioredis
 from aio_pika.abc import AbstractIncomingMessage
-from aio_pika.exceptions import AMQPConnectionError
 
 from kuhl_haus.mdp.analyzers.massive_data_analyzer import MassiveDataAnalyzer
-from kuhl_haus.mdp.models.market_data_analyzer_result import MarketDataAnalyzerResult
 from kuhl_haus.mdp.integ.web_socket_message_serde import WebSocketMessageSerde
+from kuhl_haus.mdp.models.market_data_analyzer_result import MarketDataAnalyzerResult
 
 
 class MassiveDataProcessor:

kuhl_haus/mdp/models/market_data_cache_keys.py

@@ -16,6 +16,9 @@ class MarketDataCacheKeys(Enum):
 
     # MARKET DATA CACHE
     DAILY_AGGREGATES = 'aggregate:daily'
+    TICKER_SNAPSHOTS = 'snapshots'
+    TICKER_AVG_VOLUME = 'avg_volume'
+    TICKER_FREE_FLOAT = 'free_float'
 
     # MARKET DATA PROCESSOR CACHE
     TOP_TRADES_SCANNER = f'cache:{MarketDataScannerNames.TOP_TRADES.value}'
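
The three new members are key prefixes rather than complete keys; `MarketDataCache` appends the ticker symbol to build the per-symbol Redis key. For example:

```python
from kuhl_haus.mdp.models.market_data_cache_keys import MarketDataCacheKeys

ticker = "AAPL"  # illustrative symbol
cache_key = f"{MarketDataCacheKeys.TICKER_SNAPSHOTS.value}:{ticker}"
print(cache_key)  # -> snapshots:AAPL
```
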
kuhl_haus/mdp/models/market_data_cache_ttl.py

@@ -8,6 +8,7 @@ class MarketDataCacheTTL(Enum):
     FOUR_HOURS = 14400
     SIX_HOURS = 21600
     EIGHT_HOURS = 28800
+    TWELVE_HOURS = 43200
 
     # Days
     ONE_DAY = 86400
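
The TTL values are in seconds, so the new member is consistent with its neighbors (12 × 60 × 60 = 43200). A quick sanity check:

```python
from kuhl_haus.mdp.models.market_data_cache_ttl import MarketDataCacheTTL

assert MarketDataCacheTTL.TWELVE_HOURS.value == 12 * 60 * 60  # 43200 seconds
```
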
kuhl_haus_mdp-0.1.6.dist-info/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: kuhl-haus-mdp
-Version: 0.1.3
+Version: 0.1.6
 Summary: Market data processing pipeline for stock market scanner
 Author-Email: Tom Pounders <git@oldschool.engineer>
 License: The MIT License (MIT)
@@ -33,20 +33,22 @@ Project-URL: Source, https://github.com/kuhl-haus/kuhl-haus-mdp.git
 Project-URL: Changelog, https://github.com/kuhl-haus/kuhl-haus-mdp/commits
 Project-URL: Tracker, https://github.com/kuhl-haus/kuhl-haus-mdp/issues
 Requires-Python: <3.13,>=3.9.21
-Requires-Dist: websockets
+Requires-Dist: aiohttp
 Requires-Dist: aio-pika
-Requires-Dist: redis[asyncio]
-Requires-Dist: tenacity
 Requires-Dist: fastapi
-Requires-Dist: uvicorn[standard]
+Requires-Dist: massive
 Requires-Dist: pydantic-settings
 Requires-Dist: python-dotenv
-Requires-Dist: massive
+Requires-Dist: redis[asyncio]
+Requires-Dist: tenacity
+Requires-Dist: uvicorn[standard]
+Requires-Dist: websockets
 Provides-Extra: testing
 Requires-Dist: setuptools; extra == "testing"
 Requires-Dist: pdm-backend; extra == "testing"
 Requires-Dist: pytest; extra == "testing"
 Requires-Dist: pytest-cov; extra == "testing"
+Requires-Dist: pytest-asyncio; extra == "testing"
 Description-Content-Type: text/markdown
 
 <!-- These are examples of badges you might want to add to your README:
@@ -66,7 +68,7 @@ Description-Content-Type: text/markdown
 [![codecov](https://codecov.io/gh/kuhl-haus/kuhl-haus-mdp/branch/mainline/graph/badge.svg)](https://codecov.io/gh/kuhl-haus/kuhl-haus-mdp)
 [![GitHub issues](https://img.shields.io/github/issues/kuhl-haus/kuhl-haus-mdp)](https://github.com/kuhl-haus/kuhl-haus-mdp/issues)
 [![GitHub pull requests](https://img.shields.io/github/issues-pr/kuhl-haus/kuhl-haus-mdp)](https://github.com/kuhl-haus/kuhl-haus-mdp/pulls)
-
+[![Documentation](https://readthedocs.org/projects/kuhl-haus-mdp/badge/?version=latest)](https://kuhl-haus-mdp.readthedocs.io/en/latest/)
 
 # kuhl-haus-mdp
 
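On the dependency side, `aiohttp` backs the cache's new HTTP client and `pytest-asyncio` was added to the testing extra for exercising the async methods. A test sketch, assuming the `MarketDataCache` constructor only stores its arguments (so `None` can stand in for the unused clients):

```python
import pytest

from kuhl_haus.mdp.components.market_data_cache import MarketDataCache


@pytest.mark.asyncio  # enabled by the pytest-asyncio testing extra
async def test_http_session_is_created_lazily_and_reused():
    cache = MarketDataCache(rest_client=None, redis_client=None, massive_api_key="test-key")
    session = await cache.get_http_session()
    assert session is await cache.get_http_session()  # same open session is reused
    await cache.close()
```
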
kuhl_haus_mdp-0.1.6.dist-info/RECORD

@@ -1,31 +1,31 @@
 kuhl_haus/mdp/__init__.py,sha256=5dEpAdB3kypH8tCRECoXwbly1WV9kFU5kh8ldGSa0VI,349
 kuhl_haus/mdp/analyzers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-kuhl_haus/mdp/analyzers/analyzer.py,sha256=eluYM2Iib5kgbpNZUSk2qEUL-j83ZTb3zmEmRazrmiM,404
+kuhl_haus/mdp/analyzers/analyzer.py,sha256=rIU1lcHwP2IBai0QLt0y-4ySg_ibWsutNU8JUgSxa1U,471
 kuhl_haus/mdp/analyzers/massive_data_analyzer.py,sha256=WSb7T8X4u2ue7Du7sf_fqxjgjEbR6ThllSNT1CncIM0,3866
-kuhl_haus/mdp/analyzers/top_stocks.py,sha256=nvNA-NkxMjVO0MqFuAvG-v3UdSP7iWDRGI7GxpPBzWw,10876
+kuhl_haus/mdp/analyzers/top_stocks.py,sha256=GvNSa7yWZZ7WUgpaV2t1nVOxWA2R2qpISy16x2RGaQ8,9463
 kuhl_haus/mdp/components/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-kuhl_haus/mdp/components/market_data_cache.py,sha256=r5sJHuSuLiw9BVckW--aWZHHIMqOTCf-pFURA7kef3Q,1070
-kuhl_haus/mdp/components/market_data_scanner.py,sha256=vA0HPqVIvuZb93wzJhtER6fcH6bf85AgXCbu7yVFOFE,9152
+kuhl_haus/mdp/components/market_data_cache.py,sha256=EmRDlh_GTKyYDvAbbAPQrE8n91JKNFKH8myAM4UTPLM,5835
+kuhl_haus/mdp/components/market_data_scanner.py,sha256=45MgprFlq03MvmIRYXENsrc7UlTcBE_hIsPyOvNs1zc,9745
 kuhl_haus/mdp/components/widget_data_service.py,sha256=ikygD9NRpidcXBEqft5Q11rHy_eUOwKGyOLEezo-Dd4,7439
 kuhl_haus/mdp/helpers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 kuhl_haus/mdp/helpers/process_manager.py,sha256=Is3Jx8nlBWvywQ1acdsdaSJTAG0olKskpPvrRB4VMDE,9024
 kuhl_haus/mdp/helpers/queue_name_resolver.py,sha256=l_zfRLxrjR9uwRCV2VDO4vPWLK_lj5KVG2p4Lh8xWiw,770
+kuhl_haus/mdp/helpers/utils.py,sha256=9JEpl2yr2LghOLrJUDxi-4dtDK3DZ1wBTZ1uxBJsFbQ,1309
 kuhl_haus/mdp/integ/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 kuhl_haus/mdp/integ/massive_data_listener.py,sha256=fPEYc6zZzHzFFjbP3zFInajKtEGInj8UQKKo3nKQEwQ,5098
-kuhl_haus/mdp/integ/massive_data_processor.py,sha256=qktzLfuqrOgE4C9iZs4mXFvHt2BckgevRP8pEakzggA,8694
+kuhl_haus/mdp/integ/massive_data_processor.py,sha256=H1WlbGtuSF45n7qLTLleuNlG-OlIXz4llJ7q3XRSS-s,8605
 kuhl_haus/mdp/integ/massive_data_queues.py,sha256=zC_uV2vwZCMyVerDQ18RAQwIMMF75iK4qUSqwuWqgwc,5050
-kuhl_haus/mdp/integ/utils.py,sha256=9JEpl2yr2LghOLrJUDxi-4dtDK3DZ1wBTZ1uxBJsFbQ,1309
 kuhl_haus/mdp/integ/web_socket_message_serde.py,sha256=XdaoaByc7IhtzbPDXBtXKOTjyDzfPSDuZVCoHSIaTl4,5468
 kuhl_haus/mdp/models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 kuhl_haus/mdp/models/market_data_analyzer_result.py,sha256=iICb5GVCtuqARNbR1JNCAfbxMijM3uppDNdL8_FB3eI,422
-kuhl_haus/mdp/models/market_data_cache_keys.py,sha256=5iScBMhVQaG3p9P45veE-uRT7c6JY7k6j4DcvSEXENA,942
-kuhl_haus/mdp/models/market_data_cache_ttl.py,sha256=4KvsPeg84-sp4viUX6reN8CZYiM2aF9FgfXQmPbj3hw,348
+kuhl_haus/mdp/models/market_data_cache_keys.py,sha256=04nFRdNZtvEeKFnpjZ6CNSu-4MiUgifPXPHGAZhZRsE,1051
+kuhl_haus/mdp/models/market_data_cache_ttl.py,sha256=a43ys3S61Y0ADdb03ThgrRd9x7B1EsI6FplCjecdNLY,373
 kuhl_haus/mdp/models/market_data_pubsub_keys.py,sha256=PEIPXK9jBehJB7G4pqoSuQZcfMZgOQq8Yho1itqv-1A,1306
 kuhl_haus/mdp/models/market_data_scanner_names.py,sha256=BYn1C0rYgGF1Sq583BkHADKUu-28ytNZQ-XgptuCH-Y,260
 kuhl_haus/mdp/models/massive_data_queue.py,sha256=MfYBcjVc4Fi61DWIvvhhWLUOiLmRpE9egtW-2KH6FTE,188
 kuhl_haus/mdp/models/top_stocks_cache_item.py,sha256=4vwwPTMkRRf1ct6iFInJnLSbBadM-tRk-zhqdD_ITE0,7676
-kuhl_haus_mdp-0.1.3.dist-info/METADATA,sha256=0SI4PelAQU2MlicnUa8LtzqrKLCKcNQxfONkjQldC3g,8688
-kuhl_haus_mdp-0.1.3.dist-info/WHEEL,sha256=tsUv_t7BDeJeRHaSrczbGeuK-TtDpGsWi_JfpzD255I,90
-kuhl_haus_mdp-0.1.3.dist-info/entry_points.txt,sha256=6OYgBcLyFCUgeqLgnvMyOJxPCWzgy7se4rLPKtNonMs,34
-kuhl_haus_mdp-0.1.3.dist-info/licenses/LICENSE.txt,sha256=DRkJftAJcMqoTkQ_Y6-HtKj3nm4pZah_p8XBZiYnw-c,1079
-kuhl_haus_mdp-0.1.3.dist-info/RECORD,,
+kuhl_haus_mdp-0.1.6.dist-info/METADATA,sha256=ecQ-j_82U-8JfxrpBMpJovbruVY2VSitRS-BgzG_cb8,8898
+kuhl_haus_mdp-0.1.6.dist-info/WHEEL,sha256=tsUv_t7BDeJeRHaSrczbGeuK-TtDpGsWi_JfpzD255I,90
+kuhl_haus_mdp-0.1.6.dist-info/entry_points.txt,sha256=6OYgBcLyFCUgeqLgnvMyOJxPCWzgy7se4rLPKtNonMs,34
+kuhl_haus_mdp-0.1.6.dist-info/licenses/LICENSE.txt,sha256=DRkJftAJcMqoTkQ_Y6-HtKj3nm4pZah_p8XBZiYnw-c,1079
+kuhl_haus_mdp-0.1.6.dist-info/RECORD,,