kuhl-haus-mdp-servers 0.1.5__tar.gz → 0.1.6__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: kuhl-haus-mdp-servers
-Version: 0.1.5
+Version: 0.1.6
 Summary: Container image build repository for market data processing servers
 Author-Email: Tom Pounders <git@oldschool.engineer>
 License-File: LICENSE.txt
@@ -34,6 +34,7 @@ Description-Content-Type: text/markdown
 [![PyPI](https://img.shields.io/pypi/v/kuhl-haus-mdp-servers.svg)](https://pypi.org/project/kuhl-haus-mdp-servers/)
 [![release](https://img.shields.io/github/v/release/kuhl-haus/kuhl-haus-mdp-servers?style=flat-square)](https://github.com/kuhl-haus/kuhl-haus-mdp-servers/releases)
 [![Build Status](https://github.com/kuhl-haus/kuhl-haus-mdp-servers/actions/workflows/build-images.yml/badge.svg)](https://github.com/kuhl-haus/kuhl-haus-mdp-servers/actions/workflows/build-images.yml)
+[![Publish to PyPI](https://github.com/kuhl-haus/kuhl-haus-mdp-servers/actions/workflows/publish-to-pypi.yml/badge.svg)](https://github.com/kuhl-haus/kuhl-haus-mdp-servers/actions/workflows/publish-to-pypi.yml)
 [![CodeQL Advanced](https://github.com/kuhl-haus/kuhl-haus-mdp-servers/actions/workflows/codeql.yml/badge.svg)](https://github.com/kuhl-haus/kuhl-haus-mdp-servers/actions/workflows/codeql.yml)
 [![Downloads](https://static.pepy.tech/badge/kuhl-haus-mdp-servers/month)](https://pepy.tech/project/kuhl-haus-mdp-servers)
 [![GitHub last commit](https://img.shields.io/github/last-commit/kuhl-haus/kuhl-haus-mdp-servers)](https://github.com/kuhl-haus/kuhl-haus-mdp-servers/branches)
@@ -3,6 +3,7 @@
 [![PyPI](https://img.shields.io/pypi/v/kuhl-haus-mdp-servers.svg)](https://pypi.org/project/kuhl-haus-mdp-servers/)
 [![release](https://img.shields.io/github/v/release/kuhl-haus/kuhl-haus-mdp-servers?style=flat-square)](https://github.com/kuhl-haus/kuhl-haus-mdp-servers/releases)
 [![Build Status](https://github.com/kuhl-haus/kuhl-haus-mdp-servers/actions/workflows/build-images.yml/badge.svg)](https://github.com/kuhl-haus/kuhl-haus-mdp-servers/actions/workflows/build-images.yml)
+[![Publish to PyPI](https://github.com/kuhl-haus/kuhl-haus-mdp-servers/actions/workflows/publish-to-pypi.yml/badge.svg)](https://github.com/kuhl-haus/kuhl-haus-mdp-servers/actions/workflows/publish-to-pypi.yml)
 [![CodeQL Advanced](https://github.com/kuhl-haus/kuhl-haus-mdp-servers/actions/workflows/codeql.yml/badge.svg)](https://github.com/kuhl-haus/kuhl-haus-mdp-servers/actions/workflows/codeql.yml)
 [![Downloads](https://static.pepy.tech/badge/kuhl-haus-mdp-servers/month)](https://pepy.tech/project/kuhl-haus-mdp-servers)
 [![GitHub last commit](https://img.shields.io/github/last-commit/kuhl-haus/kuhl-haus-mdp-servers)](https://github.com/kuhl-haus/kuhl-haus-mdp-servers/branches)
@@ -33,7 +33,7 @@ dependencies = [
     "python-dotenv",
     "massive",
 ]
-version = "0.1.5"
+version = "0.1.6"
 
 [project.urls]
 Homepage = "https://github.com/kuhl-haus/kuhl-haus-mdp-servers"
@@ -1,4 +1,5 @@
 import logging
+import json
 import os
 from contextlib import asynccontextmanager
 from copy import copy
@@ -10,7 +11,7 @@ from pydantic_settings import BaseSettings
 
 from kuhl_haus.mdp.integ.massive_data_queues import MassiveDataQueues
 from kuhl_haus.mdp.integ.massive_data_listener import MassiveDataListener
-from kuhl_haus.mdp.integ.utils import get_massive_api_key
+from kuhl_haus.mdp.helpers.utils import get_massive_api_key
 
 
 class Settings(BaseSettings):
@@ -22,7 +23,11 @@ class Settings(BaseSettings):
     # The default values can be overridden via environment variable; use the API to manage at runtime.
     feed: Union[str, Feed] = os.environ.get("MASSIVE_FEED", Feed.RealTime)
     market: Union[str, Market] = os.environ.get("MASSIVE_MARKET", Market.Stocks)
-    subscriptions: Optional[List[str]] = os.environ.get("MASSIVE_SUBSCRIPTIONS", ["AM.*"])
+    subscriptions: Optional[List[str]] = (
+        json.loads(os.environ.get("MASSIVE_SUBSCRIPTIONS", '["AM.*"]'))
+        if os.environ.get("MASSIVE_SUBSCRIPTIONS")
+        else ["AM.*"]
+    )
 
     # Additional Massive/Polygon.io Settings - default values can be overridden via environment variables
     raw: bool = os.environ.get("MASSIVE_RAW", False)
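With this release the MASSIVE_SUBSCRIPTIONS environment variable is parsed as a JSON-encoded array instead of being passed through as a raw string. A minimal sketch of the new parsing behavior, assuming an illustrative override value (only the "AM.*" default comes from the package itself):

    import json
    import os

    # Hypothetical override: a JSON array of subscription topics.
    os.environ["MASSIVE_SUBSCRIPTIONS"] = '["AM.*", "T.AAPL"]'

    raw = os.environ.get("MASSIVE_SUBSCRIPTIONS")
    subscriptions = json.loads(raw) if raw else ["AM.*"]
    print(subscriptions)  # ['AM.*', 'T.AAPL']; falls back to ['AM.*'] when unset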
@@ -4,7 +4,7 @@ import os
 from contextlib import asynccontextmanager
 from typing import Dict, Union
 
-# import redis.asyncio as aioredis
+import redis.asyncio as aioredis
 from fastapi import FastAPI, Response, status
 from fastapi.responses import RedirectResponse
 from pydantic_settings import BaseSettings
@@ -13,12 +13,12 @@ from massive.rest import RESTClient
 
 from kuhl_haus.mdp.analyzers.top_stocks import TopStocksAnalyzer
 from kuhl_haus.mdp.components.market_data_scanner import MarketDataScanner
-# from kuhl_haus.mdp.components.market_data_cache import MarketDataCache
+from kuhl_haus.mdp.components.market_data_cache import MarketDataCache
 from kuhl_haus.mdp.models.market_data_cache_keys import MarketDataCacheKeys
 from kuhl_haus.mdp.models.market_data_scanner_names import MarketDataScannerNames
 from kuhl_haus.mdp.models.massive_data_queue import MassiveDataQueue
 from kuhl_haus.mdp.integ.massive_data_processor import MassiveDataProcessor
-from kuhl_haus.mdp.integ.utils import get_massive_api_key
+from kuhl_haus.mdp.helpers.utils import get_massive_api_key
 from kuhl_haus.mdp.helpers.process_manager import ProcessManager
 
 
@@ -52,7 +52,7 @@ logger = logging.getLogger(__name__)
 
 
 # Global state
-# market_data_cache: MarketDataCache = None
+market_data_cache: MarketDataCache = None
 market_data_scanners: Dict[str, MarketDataScanner] = {}
 massive_data_processors: Dict[str, MassiveDataProcessor] = {}
 massive_data_queues = [
@@ -71,7 +71,7 @@ process_manager: ProcessManager = None
 @asynccontextmanager
 async def lifespan(app: FastAPI):
     """Startup and shutdown events"""
-    global process_manager
+    global process_manager, market_data_cache
 
     logger.info("Starting Market Data Processor...")
     process_manager = ProcessManager()
@@ -85,27 +85,15 @@ async def lifespan(app: FastAPI):
             queue_name=queue,
             redis_url=settings.redis_url,
         )
-    # # Market Data Cache
-    # redis_client = aioredis.from_url(
-    #     settings.redis_url,
-    #     encoding="utf-8",
-    #     decode_responses=True,
-    #     max_connections=1000,
-    #     socket_connect_timeout=10,  # Add timeout
-    # )
-    # market_data_cache = MarketDataCache(redis_client=redis_client)
-    #
-    # # TODO: Create a component to fetch company information from FMP.
-    #
+
     # Start MarketDataScanners in separate processes
     process_manager.start_worker(
         name=f"scanner_{MarketDataScannerNames.TOP_STOCKS.value}",
         worker_class=MarketDataScanner,
         redis_url=settings.redis_url,
-        analyzer=TopStocksAnalyzer(
-            rest_client=RESTClient(api_key=settings.massive_api_key)
-        ),
-        subscriptions=[f"{MarketDataCacheKeys.AGGREGATE.value}:*"]
+        massive_api_key=settings.massive_api_key,
+        subscriptions=[f"{MarketDataCacheKeys.AGGREGATE.value}:*"],
+        analyzer_class=TopStocksAnalyzer,
     )
 
     logger.info("Market Data Processor is running.")
@@ -41,14 +41,16 @@ logger = logging.getLogger(__name__)
 
 # Global service instance
 wds_service: WidgetDataService = None
+active_ws_clients: Set[WebSocket] = set()
 
 
 @asynccontextmanager
 async def lifespan(app: FastAPI):
     """Manage WDS lifecycle."""
-    global wds_service
+    global wds_service, active_ws_clients
 
     # Startup
+    active_ws_clients.clear()
     redis_client = redis.from_url(
         settings.redis_url,
         encoding="utf-8",
@@ -61,6 +63,7 @@ async def lifespan(app: FastAPI):
     yield
 
     # Shutdown
+    active_ws_clients.clear()
     await wds_service.stop()
     await pubsub_client.close()
     await redis_client.close()
@@ -88,6 +91,7 @@ async def health_check(response: Response):
             "status": "OK",
             "container_image": settings.container_image,
             "image_version": settings.image_version,
+            "active_ws_clients": len(active_ws_clients),
         })
     except Exception as e:
         logger.error(f"Fatal error while processing health check: {e}")
@@ -122,10 +126,22 @@ async def websocket_endpoint(websocket: WebSocket):
 
             if action == "auth":
                 api_key = data.get("api_key")
+                # NOTE: This service is designed for internal use and for a
+                # single-user. As such, authentication is optional and, if
+                # enabled, only supports a single API key, which is set in the
+                # AUTH_API_KEY environment variable. Adding support for
+                # user-specific API keys is non-trivial.
+                # At some point in the future, I may consider adding a more
+                # robust authentication system, but this is acceptable for now.
+                #
+                # [FEATURE] Support for user-specific API keys in Widget Data Service
+                # https://github.com/kuhl-haus/kuhl-haus-mdp-servers/issues/1
+
                 if api_key == settings.auth_api_key:
                     authenticated = True
                     logger.info(f"wds.ws.authenticated client_info:{client_info}")
                     await websocket.send_json({"status": "authorized"})
+                    active_ws_clients.add(websocket)
                 else:
                     await websocket.send_json({"status": "invalid key"})
                     await websocket.close()
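The added note documents the single-key authentication model: a client sends its API key in an "auth" message and, once accepted, is tracked in active_ws_clients. A minimal client-side sketch, assuming the third-party websockets library and a placeholder endpoint URL (the actual path is not shown in this diff):

    import asyncio
    import json

    import websockets  # third-party client library, used here for illustration


    async def authenticate(url: str, api_key: str) -> None:
        async with websockets.connect(url) as ws:
            # Handshake shown in the diff: send the key, then expect
            # {"status": "authorized"} or {"status": "invalid key"}.
            await ws.send(json.dumps({"action": "auth", "api_key": api_key}))
            print(json.loads(await ws.recv()))


    asyncio.run(authenticate("ws://localhost:8000/ws", "value-of-AUTH_API_KEY"))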
@@ -185,6 +201,11 @@ async def websocket_endpoint(websocket: WebSocket):
         logger.exception(f"wds.ws.unhandled_exception {repr(e)}", exc_info=True)
 
     finally:
+        # Note: the set.remove() method will raise a KeyError if the websocket
+        # is not present in the set. Using set.discard(), which will remove
+        # the websocket from active_ws_clients if it is present but will not
+        # raise an exception.
+        active_ws_clients.discard(websocket)
         # Clean up all subscriptions for this client
         for feed in active_feeds:
             await wds_service.unsubscribe(feed, websocket)
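The cleanup path uses set.discard() rather than set.remove(), as the added comment explains; the difference is standard Python behavior:

    clients = set()
    ws = object()  # stand-in for a WebSocket that never authenticated

    clients.discard(ws)      # no-op when the item is absent; never raises
    try:
        clients.remove(ws)   # raises KeyError when the item is absent
    except KeyError:
        print("remove() raised because the websocket was not in the set")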