kuhl-haus-mdp-servers 0.1.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,10 @@
1
# Resolve the installed distribution's version at import time (PyScaffold-style boilerplate).
from importlib_metadata import PackageNotFoundError, version  # pragma: no cover

try:
    # Change here if project is renamed and does not equal the package name
    dist_name = __name__
    __version__ = version(dist_name)
except PackageNotFoundError:  # pragma: no cover
    # Distribution metadata unavailable (e.g. running from a source checkout).
    __version__ = "unknown"
finally:
    # Keep the package namespace clean: only __version__ (and dist_name) remain importable.
    del version, PackageNotFoundError
@@ -0,0 +1,258 @@
1
+ import logging
2
+ import os
3
+ from contextlib import asynccontextmanager
4
+ from copy import copy
5
+ from typing import Optional, List, Union
6
+
7
+ from fastapi import FastAPI, Response, status
8
+ from massive.websocket import Feed, Market
9
+ from pydantic_settings import BaseSettings
10
+
11
+ from kuhl_haus.mdp.integ.massive_data_queues import MassiveDataQueues
12
+ from kuhl_haus.mdp.integ.massive_data_listener import MassiveDataListener
13
+ from kuhl_haus.mdp.integ.utils import get_massive_api_key
14
+
15
+
16
class Settings(BaseSettings):
    """Runtime configuration for the Market Data Listener (MDL) server.

    Defaults can be overridden via environment variables; feed, market and
    subscriptions can additionally be managed at runtime through the API.
    """

    # TODO: Retrieve Massive client settings from Service Control Plane API call
    # Massive/Polygon.io API Key
    massive_api_key: str = get_massive_api_key()

    # Massive/Polygon.io Subscription Settings
    # The default values can be overridden via environment variable; use the API to manage at runtime.
    feed: Union[str, Feed] = os.environ.get("MASSIVE_FEED", Feed.RealTime)
    market: Union[str, Market] = os.environ.get("MASSIVE_MARKET", Market.Stocks)
    # MASSIVE_SUBSCRIPTIONS is a comma-separated list (e.g. "AM.*,T.*").
    # The env value is a single raw string, so split it into the List[str] this
    # field declares (pydantic does not validate field defaults, so the raw
    # string would otherwise leak through).  The default remains ["AM.*"].
    subscriptions: Optional[List[str]] = [
        s.strip()
        for s in os.environ.get("MASSIVE_SUBSCRIPTIONS", "AM.*").split(",")
        if s.strip()
    ]

    # Additional Massive/Polygon.io Settings - default values can be overridden via environment variables
    # NOTE(review): when these env vars are set, os.environ.get returns a string
    # and pydantic does not validate defaults — e.g. MASSIVE_RAW=false may yield
    # the truthy string "false"; confirm desired coercion behavior.
    raw: bool = os.environ.get("MASSIVE_RAW", False)
    verbose: bool = os.environ.get("MASSIVE_VERBOSE", False)
    max_reconnects: Optional[int] = os.environ.get("MASSIVE_MAX_RECONNECTS", 5)
    secure: bool = os.environ.get("MASSIVE_SECURE", True)

    # Redis Settings
    redis_url: str = os.environ.get("REDIS_URL", "redis://redis:redis@localhost:6379/0")

    # RabbitMQ Settings
    rabbitmq_url: str = os.environ.get("RABBITMQ_URL", "amqp://crow:crow@localhost:5672/")
    rabbitmq_host: str = os.environ.get("RABBITMQ_API", "http://crow:crow@localhost:15672/api/")
    message_ttl_ms: int = os.environ.get("MARKET_DATA_MESSAGE_TTL", 5000)  # 5 seconds in milliseconds

    # Server Settings
    server_ip: str = os.environ.get("SERVER_IP", "0.0.0.0")
    server_port: int = os.environ.get("SERVER_PORT", 4200)
    log_level: str = os.environ.get("LOG_LEVEL", "INFO").upper()
    container_image: str = os.environ.get("CONTAINER_IMAGE", "Unknown")
    image_version: str = os.environ.get("IMAGE_VERSION", "Unknown")
    auto_start: bool = os.environ.get("MARKET_DATA_LISTENER_AUTO_START_ENABLED", False)
48
+
49
+
50
# Instantiate settings once at import time (reads environment variables).
settings = Settings()

# Root logging configuration for the whole process.
logging.basicConfig(
    level=settings.log_level,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
)
logger = logging.getLogger(__name__)

# Global state
# Populated during application startup (see lifespan); None until then.
massive_data_queues: Optional[MassiveDataQueues] = None
massive_data_listener: Optional[MassiveDataListener] = None
61
+
62
+
63
@asynccontextmanager
async def lifespan(app: FastAPI):
    """Startup and shutdown events.

    Startup: create the MDQ (RabbitMQ) publisher and declare its queues,
    then construct the Massive WebSocket listener wired to publish into MDQ.
    Shutdown: stop the listener, then shut down the queues.
    """

    # Startup
    logger.info("Instantiating Market Data Listener...")
    global massive_data_listener, massive_data_queues

    # Queues are declared before the listener exists, so message handling
    # always has a destination.
    massive_data_queues = MassiveDataQueues(
        logger=logger,
        rabbitmq_url=settings.rabbitmq_url,
        message_ttl=settings.message_ttl_ms,
    )
    await massive_data_queues.setup_queues()

    # The listener forwards every received message to the MDQ handler.
    massive_data_listener = MassiveDataListener(
        logger=logger,
        message_handler=massive_data_queues.handle_messages,
        api_key=settings.massive_api_key,
        feed=settings.feed,
        market=settings.market,
        raw=settings.raw,
        verbose=settings.verbose,
        subscriptions=settings.subscriptions,
        max_reconnects=settings.max_reconnects,
        secure=settings.secure,
    )
    logger.info("Market Data Listener is ready.")
    # NOTE: AUTO-START FEATURE IS DISABLED BY DEFAULT.
    # Non-business licenses are limited to a single WebSocket connection for the entire account.
    # The stop, start, and restart API functionality enables manual control of the WebSocket connection.
    #
    # To enable auto-start, set the environment variable MARKET_DATA_LISTENER_AUTO_START_ENABLED=true.
    if settings.auto_start:
        logger.info("[AUTO-START ENABLED]Starting Market Data Listener...")
        await massive_data_listener.start()

    yield

    # Shutdown
    logger.info("Shutting down WebSocket sidecar...")
    # Reuses the /stop endpoint coroutine to stop the listener.
    # NOTE(review): this runs even when the listener was never started —
    # presumably MassiveDataListener.stop() tolerates that; confirm.
    await stop_websocket_client()
    await massive_data_queues.shutdown()
106
+
107
# ASGI application; the lifespan context manages listener/queue setup and teardown.
app = FastAPI(
    title="Market Data Listener",
    description="Connects to market data provider and publishes to event-specific queues",
    lifespan=lifespan,
)
112
+
113
+
114
# Maps an incoming feed string to the corresponding Feed member.
_FEEDS = {Feed.RealTime.value: Feed.RealTime, Feed.Delayed.value: Feed.Delayed}


@app.post("/feed")
async def feed(feed_str: str):
    """Update the Massive/Polygon.io feed (real-time or delayed).

    On failure the previous feed is restored and an error status is returned
    to the caller (previously errors were swallowed and the client received
    an empty 200 response with no indication of failure).
    """
    original_feed = copy(settings.feed)
    logger.info(f"Original feed: {original_feed}")
    try:
        new_feed = _FEEDS.get(feed_str)
        if new_feed is None:
            raise ValueError(f"Invalid feed: {feed_str}")
        logger.info(f"Setting feed to: {repr(new_feed)}")
        settings.feed = new_feed
        massive_data_listener.feed = new_feed
        return {"status": "success", "feed": feed_str}
    except Exception as e:
        # Roll back both the settings object and the live listener.
        logger.error(f"Error setting feed: {e}")
        logger.error(f"Restoring feed to: {original_feed}")
        settings.feed = original_feed
        massive_data_listener.feed = original_feed
        logger.error(f"Current feed: {settings.feed}")
        logger.error("Rollback complete")
        return {"status": "error", "message": str(e)}
137
+
138
+
139
# Markets this deployment accepts; other Market members are deliberately rejected.
_MARKETS = {
    Market.Stocks.value: Market.Stocks,
    Market.Options.value: Market.Options,
    Market.Indices.value: Market.Indices,
}


@app.post("/market")
async def market(market_str: str):
    """Update the Massive/Polygon.io market (stocks, options, or indices).

    On failure the previous market is restored and an error status is
    returned to the caller (previously errors were swallowed and the client
    received an empty 200 response with no indication of failure).
    """
    original_market = copy(settings.market)
    logger.info(f"Original market: {original_market}")
    try:
        new_market = _MARKETS.get(market_str)
        if new_market is None:
            raise ValueError(f"Invalid market: {market_str}")
        logger.info(f"Setting market to: {repr(new_market)}")
        settings.market = new_market
        massive_data_listener.market = new_market
        return {"status": "success", "market": market_str}
    except Exception as e:
        # Roll back both the settings object and the live listener.
        logger.error(f"Error setting market: {e}")
        logger.error(f"Restoring market to: {original_market}")
        settings.market = original_market
        massive_data_listener.market = original_market
        logger.error(f"Current market: {settings.market}")
        logger.error("Rollback complete")
        return {"status": "error", "message": str(e)}
166
+
167
+
168
# Subscription prefixes accepted by the listener:
# aggregates (A/AM), trades (T), quotes (Q), and LULD halts.
_ALLOWED_SUBSCRIPTION_PREFIXES = ("A.", "AM.", "T.", "Q.", "LULD.")


@app.post("/subscriptions")
async def subscriptions(subscriptions_list: List[str]):
    """Replace the Massive/Polygon.io subscription list.

    Only entries starting with an allowed prefix are accepted; rejected
    entries are reported back to the caller instead of being silently
    dropped.  The new list is built first and applied atomically, so a
    failure cannot leave a partially-updated live subscription list; on
    error the previous list is restored.
    """
    original_subscriptions = copy(settings.subscriptions)
    logger.info(f"Original subscriptions: {original_subscriptions}")
    try:
        accepted: List[str] = []
        rejected: List[str] = []
        for sub in subscriptions_list:
            # str.startswith accepts a tuple of prefixes — one call replaces
            # the original chain of or-ed startswith tests.
            if sub.startswith(_ALLOWED_SUBSCRIPTION_PREFIXES):
                logger.info(f"Adding subscription: {sub}")
                accepted.append(sub)
            else:
                rejected.append(sub)
        settings.subscriptions = accepted
        massive_data_listener.subscriptions = settings.subscriptions
        logger.info(f"Current subscriptions: {settings.subscriptions}")
        return {"status": "success", "subscriptions": accepted, "rejected": rejected}
    except Exception as e:
        logger.error(f"Error setting subscriptions: {e}")
        logger.error(f"Restoring subscriptions to: {original_subscriptions}")
        settings.subscriptions = original_subscriptions
        massive_data_listener.subscriptions = original_subscriptions
        logger.error(f"Current subscriptions: {settings.subscriptions}")
        logger.error("Rollback complete")
        return {"status": "error", "message": str(e)}
194
+
195
+
196
@app.get("/start")
async def start_websocket_client():
    """Start the Massive WebSocket listener (manual control endpoint)."""
    logger.info("Starting Market Data Listener...")
    await massive_data_listener.start()
200
+
201
+
202
@app.get("/stop")
async def stop_websocket_client():
    """Stop the Massive WebSocket listener; also reused by lifespan shutdown."""
    logger.info("Stopping Market Data Listener...")
    await massive_data_listener.stop()
206
+
207
+
208
@app.get("/restart")
async def restart_websocket_client():
    """Restart the Massive WebSocket listener (manual control endpoint)."""
    logger.info("Restarting Market Data Listener...")
    await massive_data_listener.restart()
212
+
213
+
214
@app.get("/")
async def root():
    """Summarize service status from the MDQ and MDL connection states.

    "Running" when both the queue publisher and the WebSocket listener are
    connected, "Idle" when only the queues are connected, "Unhealthy" otherwise.
    """
    queues_up = massive_data_queues.connection_status["connected"]
    listener_up = massive_data_listener.connection_status["connected"]
    if queues_up and listener_up:
        service_status = "Running"
    elif queues_up:
        service_status = "Idle"
    else:
        service_status = "Unhealthy"
    return {
        "service": "Market Data Listener",
        "status": service_status,
        "auto-start": settings.auto_start,
        "container_image": settings.container_image,
        "image_version": settings.image_version,
        "mdq_connection_status": massive_data_queues.connection_status,
        "mdl_connection_status": massive_data_listener.connection_status
    }
231
+
232
+
233
@app.get("/health", status_code=200)
async def health_check(response: Response):
    """Health check endpoint"""
    # The server should be connected to MDQ even when the WebSocket client is not running.
    status_message = "OK"
    if not massive_data_queues.connection_status["connected"]:
        # MDQ connectivity is required at all times; report 503 when lost.
        status_message = "Unhealthy"
        response.status_code = status.HTTP_503_SERVICE_UNAVAILABLE
    # TODO: Investigate if this caused health check failures in production during off-hours.
    # if settings.auto_start and not massive_data_listener.connection_status["connected"]:
    #     status_message = "Unhealthy"
    #     response.status_code = status.HTTP_503_SERVICE_UNAVAILABLE
    return {
        "service": "Market Data Listener",
        "status": status_message,
        "auto-start": settings.auto_start,
        "container_image": settings.container_image,
        "image_version": settings.image_version,
        "mdq_connection_status": massive_data_queues.connection_status,
        "mdl_connection_status": massive_data_listener.connection_status
    }
254
+
255
+
256
if __name__ == "__main__":
    # Run the ASGI app directly when executed as a script.
    import uvicorn
    # Bind to the configured address instead of hard-coded literals so the
    # SERVER_IP / SERVER_PORT environment overrides declared in Settings
    # actually take effect (defaults are unchanged: 0.0.0.0:4200).
    uvicorn.run(app, host=settings.server_ip, port=settings.server_port)
@@ -0,0 +1,245 @@
1
+ import asyncio
2
+ import logging
3
+ import os
4
+ from contextlib import asynccontextmanager
5
+ from typing import Dict, Union
6
+
7
+ # import redis.asyncio as aioredis
8
+ from fastapi import FastAPI, Response, status
9
+ from fastapi.responses import RedirectResponse
10
+ from pydantic_settings import BaseSettings
11
+
12
+ from massive.rest import RESTClient
13
+
14
+ from kuhl_haus.mdp.analyzers.top_stocks import TopStocksAnalyzer
15
+ from kuhl_haus.mdp.components.market_data_scanner import MarketDataScanner
16
+ # from kuhl_haus.mdp.components.market_data_cache import MarketDataCache
17
+ from kuhl_haus.mdp.models.market_data_cache_keys import MarketDataCacheKeys
18
+ from kuhl_haus.mdp.models.market_data_scanner_names import MarketDataScannerNames
19
+ from kuhl_haus.mdp.models.massive_data_queue import MassiveDataQueue
20
+ from kuhl_haus.mdp.integ.massive_data_processor import MassiveDataProcessor
21
+ from kuhl_haus.mdp.integ.utils import get_massive_api_key
22
+ from kuhl_haus.mdp.helpers.process_manager import ProcessManager
23
+
24
+
25
class Settings(BaseSettings):
    """Runtime configuration for the Market Data Processor (MDP) server.

    Defaults can be overridden via environment variables.
    NOTE(review): when these env vars are set, os.environ.get returns a
    string and pydantic does not validate field defaults — confirm the
    int-typed fields coerce as intended.
    """

    # Massive/Polygon.io API Key
    massive_api_key: str = get_massive_api_key()

    # RabbitMQ Settings
    rabbitmq_url: str = os.environ.get("RABBITMQ_URL", "amqp://crow:crow@localhost:5672/")
    rabbitmq_host: str = os.environ.get("RABBITMQ_API", "http://crow:crow@localhost:15672/api/")
    message_ttl_ms: int = os.environ.get("MARKET_DATA_MESSAGE_TTL", 5000)  # 5 seconds in milliseconds

    # Redis Settings
    redis_url: str = os.environ.get("REDIS_URL", "redis://redis:redis@localhost:6379/0")

    # Server Settings
    server_ip: str = os.environ.get("SERVER_IP", "0.0.0.0")
    server_port: int = os.environ.get("SERVER_PORT", 4201)
    log_level: str = os.environ.get("LOG_LEVEL", "INFO").upper()
    container_image: str = os.environ.get("CONTAINER_IMAGE", "Unknown")
    image_version: str = os.environ.get("IMAGE_VERSION", "Unknown")
+
44
+
45
# Instantiate settings once at import time (reads environment variables).
settings = Settings()

# Root logging configuration for the whole process.
logging.basicConfig(
    level=settings.log_level,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
)
logger = logging.getLogger(__name__)


# Global state
# market_data_cache: MarketDataCache = None
# NOTE(review): these two registries are never populated anywhere in this
# module (workers are launched as separate processes by ProcessManager in
# lifespan), yet the /start, /stop and /restart endpoints iterate them —
# confirm the intended wiring.
market_data_scanners: Dict[str, MarketDataScanner] = {}
massive_data_processors: Dict[str, MassiveDataProcessor] = {}
# Queue names; lifespan starts one MassiveDataProcessor worker process per entry.
massive_data_queues = [
    MassiveDataQueue.TRADES.value,
    MassiveDataQueue.AGGREGATE.value,
    MassiveDataQueue.QUOTES.value,
    MassiveDataQueue.HALTS.value,
    MassiveDataQueue.NEWS.value,
    MassiveDataQueue.UNKNOWN.value,
]

# Global process manager
# Created during application startup (see lifespan); None until then.
process_manager: ProcessManager = None
69
+
70
+
71
@asynccontextmanager
async def lifespan(app: FastAPI):
    """Startup and shutdown events.

    Startup: launch one MassiveDataProcessor worker process per MDQ queue,
    then launch the TopStocks scanner worker.  Shutdown: stop all workers
    through the process manager.
    """
    global process_manager

    logger.info("Starting Market Data Processor...")
    process_manager = ProcessManager()

    # Start MassiveDataProcessors in separate processes
    # One worker per queue; each consumes raw market data from RabbitMQ and
    # writes to Redis.
    for queue in massive_data_queues:
        process_manager.start_worker(
            name=f"mdp_{queue}",
            worker_class=MassiveDataProcessor,
            rabbitmq_url=settings.rabbitmq_url,
            queue_name=queue,
            redis_url=settings.redis_url,
        )
    # # Market Data Cache
    # redis_client = aioredis.from_url(
    #     settings.redis_url,
    #     encoding="utf-8",
    #     decode_responses=True,
    #     max_connections=1000,
    #     socket_connect_timeout=10,  # Add timeout
    # )
    # market_data_cache = MarketDataCache(redis_client=redis_client)
    #
    # # TODO: Create a component to fetch company information from FMP.
    #
    # Start MarketDataScanners in separate processes
    process_manager.start_worker(
        name=f"scanner_{MarketDataScannerNames.TOP_STOCKS.value}",
        worker_class=MarketDataScanner,
        redis_url=settings.redis_url,
        analyzer=TopStocksAnalyzer(
            rest_client=RESTClient(api_key=settings.massive_api_key)
        ),
        subscriptions=[f"{MarketDataCacheKeys.AGGREGATE.value}:*"]
    )

    logger.info("Market Data Processor is running.")

    yield

    # Shutdown
    logger.info("Shutting down Market Data Processor...")
    # Allow up to 15s for workers to exit cleanly.
    # NOTE(review): behavior after the timeout depends on ProcessManager.stop_all — confirm.
    process_manager.stop_all(timeout=15.0)
    logger.info("Market Data Processor is stopped.")
119
+
120
+
121
# ASGI application; the lifespan context manages the worker processes.
app = FastAPI(
    title="Market Data Processor",
    description="The MDP is responsible for the heavy lifting which would otherwise constrain the message handling speed of the MDL.",
    lifespan=lifespan,
)
126
+
127
+
128
@app.get("/")
async def root():
    """Redirect the service root to the health check endpoint."""
    return RedirectResponse("/health")
132
+
133
+
134
@app.get("/start")
async def start_scanners():
    """Start all massive data processors and market data scanners.

    NOTE(review): massive_data_processors / market_data_scanners are module
    globals that are never populated in this module (workers run in separate
    processes via ProcessManager), so these loops are no-ops until something
    registers entries — confirm the intended wiring.
    """
    logger.info("Starting Massive Data Processors...")
    # asyncio only keeps a weak reference to tasks; the original dropped the
    # create_task() result, so a task could be garbage-collected mid-run.
    # Retain strong references until each task finishes.
    if not hasattr(start_scanners, "_tasks"):
        start_scanners._tasks = set()
    for processor in massive_data_processors.values():
        task = asyncio.create_task(processor.start())
        start_scanners._tasks.add(task)
        task.add_done_callback(start_scanners._tasks.discard)
    logger.info("Massive Data Processors started successfully.")

    logger.info("Starting Market Data Scanners...")
    for name, scanner in market_data_scanners.items():
        logger.info(f"Starting {name}...")
        await scanner.start()
        logger.info(f"{name} started successfully.")
    logger.info("Market Data Scanners started successfully.")
148
+
149
+
150
@app.post("/start_scanner")
async def start_scanner(scanner_name: str):
    """Start a single market data scanner by name.

    Returns an error payload when the scanner is unknown.
    """
    # Membership test on the dict itself; the original `.keys()` call was redundant.
    if scanner_name not in market_data_scanners:
        return {"status": "error", "message": f"Scanner {scanner_name} not found."}
    logger.info(f"Starting {scanner_name}...")
    await market_data_scanners[scanner_name].start()
    logger.info(f"Started {scanner_name} successfully.")
    return {"status": "success", "message": f"{scanner_name} started successfully."}
158
+
159
+
160
@app.get("/stop")
async def stop_scanners():
    """Stop all massive data processors and market data scanners."""
    logger.info("Shutting down Massive Data Processors...")
    # Iterate the processors that actually exist instead of indexing the dict
    # with every known queue name: massive_data_processors is not guaranteed
    # to hold an entry per queue (it starts empty in this module), and the
    # original massive_data_processors[queue] lookup raised KeyError for any
    # missing queue.
    for queue, processor in massive_data_processors.items():
        logger.info(f"Stopping {queue}...")
        await processor.stop()
        logger.info(f"{queue} stopped successfully.")
    logger.info("Massive Data Processors stopped successfully.")
    logger.info("Shutting down Market Data Scanners...")
    for name, scanner in market_data_scanners.items():
        logger.info(f"Stopping {name}...")
        await scanner.stop()
        logger.info(f"{name} stopped successfully.")
    logger.info("Market Data Scanners stopped successfully.")
174
+
175
+
176
@app.post("/stop_scanner")
async def stop_scanner(scanner_name: str):
    """Stop a single market data scanner by name.

    Returns an error payload when the scanner is unknown.
    """
    # Membership test on the dict itself; the original `.keys()` call was redundant.
    if scanner_name not in market_data_scanners:
        return {"status": "error", "message": f"Scanner {scanner_name} not found."}
    logger.info(f"Stopping {scanner_name}...")
    await market_data_scanners[scanner_name].stop()
    logger.info(f"Stopped {scanner_name} successfully.")
    return {"status": "success", "message": f"{scanner_name} stopped successfully."}
184
+
185
+
186
@app.get("/restart")
async def restart_scanners():
    """Restart all massive data processors and market data scanners."""
    logger.info("Restarting Massive Data Processors...")
    # Iterate existing processors; the original indexed the dict by every
    # queue name and raised KeyError for queues without a processor entry
    # (the dict starts empty in this module).
    for queue, processor in massive_data_processors.items():
        logger.info(f"Stopping {queue}...")
        await processor.stop()
        logger.info(f"{queue} stopped successfully.")
    logger.info("Starting Massive Data Processors...")
    # Retain strong references so the background tasks are not
    # garbage-collected before completion (asyncio holds only weak refs).
    if not hasattr(restart_scanners, "_tasks"):
        restart_scanners._tasks = set()
    for processor in massive_data_processors.values():
        task = asyncio.create_task(processor.start())
        restart_scanners._tasks.add(task)
        task.add_done_callback(restart_scanners._tasks.discard)
    logger.info("Massive Data Processors restarted successfully.")

    logger.info("Restarting Market Data Scanners...")
    for name, scanner in market_data_scanners.items():
        logger.info(f"Restarting {name}...")
        await scanner.restart()
        logger.info(f"{name} restarted successfully.")
    # Fixed garbled log message (was: "Restarting Market Data Scanners restarted successfully.")
    logger.info("Market Data Scanners restarted successfully.")
204
+
205
+
206
@app.post("/restart_scanner")
async def restart_scanner(scanner_name: str):
    """Restart a single market data scanner by name.

    Returns an error payload when the scanner is unknown.
    """
    # Membership test on the dict itself; the original `.keys()` call was redundant.
    if scanner_name not in market_data_scanners:
        return {"status": "error", "message": f"Scanner {scanner_name} not found."}
    logger.info(f"Restarting {scanner_name}...")
    await market_data_scanners[scanner_name].restart()
    logger.info(f"Restarted {scanner_name} successfully.")
    return {"status": "success", "message": f"{scanner_name} restarted successfully."}
214
+
215
+
216
@app.get("/health", status_code=200)
async def health_check(response: Response):
    """Health check endpoint - always responsive"""
    try:
        ret: dict[str, Union[str, dict]] = {
            "status": "OK",
            "container_image": settings.container_image,
            "image_version": settings.image_version,
        }

        # Non-blocking status collection
        # One status entry per processor worker process (names match lifespan's start_worker calls).
        for queue in massive_data_queues:
            name = f"mdp_{queue}"
            ret[name] = process_manager.get_status(name)

        # One status entry per scanner worker process.
        for scanner_name in [MarketDataScannerNames.TOP_STOCKS.value]:
            name = f"scanner_{scanner_name}"
            ret[name] = process_manager.get_status(name)

        return ret

    except Exception as e:
        # Degrade to a 503 payload rather than letting the health endpoint crash.
        logger.error(f"Health check error: {e}")
        response.status_code = status.HTTP_503_SERVICE_UNAVAILABLE
        return {"status": "ERROR", "message": "An unhandled exception occurred during health check."}
241
+
242
+
243
if __name__ == "__main__":
    # Run the ASGI app directly when executed as a script.
    import uvicorn
    # Bind to the configured address instead of hard-coded literals so the
    # SERVER_IP / SERVER_PORT environment overrides declared in Settings
    # actually take effect (defaults are unchanged: 0.0.0.0:4201).
    uvicorn.run(app, host=settings.server_ip, port=settings.server_port)
@@ -0,0 +1,190 @@
1
+ import json
2
+ import logging
3
+ import os
4
+ from contextlib import asynccontextmanager
5
+ from typing import Set
6
+
7
+ import redis.asyncio as redis
8
+ from fastapi import FastAPI, Response, WebSocket, WebSocketDisconnect, status
9
+ from fastapi.responses import JSONResponse, RedirectResponse
10
+ from kuhl_haus.mdp.components.widget_data_service import WidgetDataService
11
+ from pydantic_settings import BaseSettings
12
+
13
+
14
class UnauthorizedException(Exception):
    """Raised by the WebSocket endpoint when a client fails authentication;
    caught within the same handler to terminate the session."""
    pass
16
+
17
+
18
class Settings(BaseSettings):
    """Runtime configuration for the Widget Data Service (WDS) server."""

    # Redis Settings
    redis_url: str = os.environ.get("REDIS_URL", "redis://localhost:6379/0")

    # Server Settings
    server_ip: str = os.environ.get("SERVER_IP", "0.0.0.0")
    server_port: int = os.environ.get("SERVER_PORT", 4202)
    log_level: str = os.environ.get("LOG_LEVEL", "INFO").upper()
    container_image: str = os.environ.get("CONTAINER_IMAGE", "Unknown")
    image_version: str = os.environ.get("IMAGE_VERSION", "Unknown")

    # Auth Settings
    # NOTE(review): when AUTH_ENABLED is set, os.environ.get returns a string
    # and pydantic does not validate field defaults — AUTH_ENABLED=false may
    # yield the truthy string "false"; confirm desired coercion behavior.
    auth_enabled: bool = os.environ.get("AUTH_ENABLED", False)
    auth_api_key: str = os.environ.get("AUTH_API_KEY", "secret")
32
+
33
+
34
# Instantiate settings once at import time (reads environment variables).
settings = Settings()
# Root logging configuration for the whole process.
logging.basicConfig(
    level=settings.log_level,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
)
logger = logging.getLogger(__name__)


# Global service instance
# Created during application startup (see lifespan); None until then.
wds_service: WidgetDataService = None
44
+
45
+
46
@asynccontextmanager
async def lifespan(app: FastAPI):
    """Manage WDS lifecycle.

    Startup: build the Redis and pub/sub clients and start the
    WidgetDataService.  Shutdown: stop the service, then close both clients.
    """
    global wds_service

    # Startup
    redis_client = redis.from_url(
        settings.redis_url,
        encoding="utf-8",
        decode_responses=True
    )
    pubsub_client = redis_client.pubsub()
    wds_service = WidgetDataService(redis_client=redis_client, pubsub_client=pubsub_client)
    await wds_service.start()

    yield

    # Shutdown
    # Stop the service before closing the clients it depends on.
    await wds_service.stop()
    await pubsub_client.close()
    # NOTE(review): newer redis-py asyncio releases deprecate close() in
    # favor of aclose() — confirm against the pinned redis version.
    await redis_client.close()
67
+
68
+
69
# ASGI application; the lifespan context manages the service and Redis clients.
app = FastAPI(
    title="Widget Data Service",
    description="WebSocket interface for client market data subscriptions",
    lifespan=lifespan,
)
74
+
75
+
76
@app.get("/")
async def root():
    """Redirect the service root to the health check endpoint."""
    return RedirectResponse("/health")
80
+
81
+
82
@app.get("/health", status_code=200)
async def health_check(response: Response):
    """Kubernetes health check endpoint.

    Returns a JSON body in both the healthy and error cases; previously the
    error path set a 503 status code but returned no body at all.
    """
    try:
        response.status_code = status.HTTP_200_OK
        return JSONResponse({
            "status": "OK",
            "container_image": settings.container_image,
            "image_version": settings.image_version,
        })
    except Exception as e:
        logger.error(f"Fatal error while processing health check: {e}")
        response.status_code = status.HTTP_503_SERVICE_UNAVAILABLE
        # Status must be set on the returned JSONResponse: when a Response
        # object is returned directly, mutations of the injected `response`
        # parameter are not applied.
        return JSONResponse(
            {"status": "ERROR", "message": "An unhandled exception occurred during health check."},
            status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
        )
95
+
96
+
97
def _client_info(websocket: WebSocket) -> dict:
    """Build a loggable summary of the connected client.

    Fix: websocket.headers is a Mapping whose .items() view is not JSON
    serializable — the original json.dumps(websocket.headers.items()) raised
    TypeError on every connection (before the handler's try block on connect).
    Serialize a plain dict of the headers instead.
    """
    return {
        "headers": json.dumps(dict(websocket.headers)),
        "host": websocket.client.host,
        "port": websocket.client.port
    }


@app.websocket("/ws")
async def websocket_endpoint(websocket: WebSocket):
    """WebSocket endpoint for feed subscriptions.

    Client protocol:
    Authenticate:{"action": "auth", "api_key": "secret"}
    Subscribe: {"action": "subscribe", "feed": "stocks:luld:*"}
    Unsubscribe: {"action": "unsubscribe", "feed": "stocks:luld:*"}
    Snapshot: {"action": "get", "cache": "stocks:luld:*"}
    """
    await websocket.accept()
    logger.info(f"wds.ws.connected client_info:{_client_info(websocket)}")

    active_feeds: Set[str] = set()
    # When auth is disabled, every client is implicitly authenticated.
    authenticated: bool = not settings.auth_enabled
    try:
        if not authenticated:
            # The first message must be an auth request.
            message = await websocket.receive_text()
            data = json.loads(message)
            action = data.get("action")

            if action == "auth":
                api_key = data.get("api_key")
                if api_key == settings.auth_api_key:
                    authenticated = True
                    logger.info(f"wds.ws.authenticated client_info:{_client_info(websocket)}")
                    await websocket.send_json({"status": "authorized"})
                else:
                    await websocket.send_json({"status": "invalid key"})
                    await websocket.close()
                    raise UnauthorizedException("Invalid API key")
            else:
                await websocket.send_json({"status": "unauthorized"})
                await websocket.close()
                raise UnauthorizedException("Unauthorized")
        while authenticated:
            message = await websocket.receive_text()
            data = json.loads(message)
            action = data.get("action")

            if action == "subscribe":
                feed = data.get("feed")
                if feed:
                    await wds_service.subscribe(feed, websocket)
                    active_feeds.add(feed)
                    await websocket.send_json({"status": "subscribed", "feed": feed})

            elif action == "unsubscribe":
                feed = data.get("feed")
                if feed and feed in active_feeds:
                    await wds_service.unsubscribe(feed, websocket)
                    active_feeds.remove(feed)
                    await websocket.send_json({"status": "unsubscribed", "feed": feed})

            elif action == "get":
                cache_key = data.get("cache")
                if cache_key:
                    cached_data = await wds_service.get_cache(cache_key)
                    await websocket.send_json({
                        "cache": cache_key,
                        "data": cached_data
                    })
            else:
                await websocket.send_json({"status": "invalid action"})

    except WebSocketDisconnect:
        logger.info(f"wds.ws.disconnected client_info:{_client_info(websocket)}")
        await wds_service.disconnect(websocket)

    except UnauthorizedException:
        logger.info(f"wds.ws.unauthorized client_info:{_client_info(websocket)}")

    except Exception as e:
        logger.exception(f"wds.ws.unhandled_exception {repr(e)}", exc_info=True)

    finally:
        # Clean up all subscriptions for this client
        for feed in active_feeds:
            await wds_service.unsubscribe(feed, websocket)
@@ -0,0 +1,117 @@
1
+ Metadata-Version: 2.4
2
+ Name: kuhl-haus-mdp-servers
3
+ Version: 0.1.5
4
+ Summary: Container image build repository for market data processing servers
5
+ Author-Email: Tom Pounders <git@oldschool.engineer>
6
+ License-File: LICENSE.txt
7
+ Classifier: Development Status :: 4 - Beta
8
+ Classifier: Programming Language :: Python
9
+ Project-URL: Homepage, https://github.com/kuhl-haus/kuhl-haus-mdp-servers
10
+ Project-URL: Documentation, https://github.com/kuhl-haus/kuhl-haus-mdp-servers/wiki
11
+ Project-URL: Source, https://github.com/kuhl-haus/kuhl-haus-mdp-servers.git
12
+ Project-URL: Changelog, https://github.com/kuhl-haus/kuhl-haus-mdp-servers/commits
13
+ Project-URL: Tracker, https://github.com/kuhl-haus/kuhl-haus-mdp-servers/issues
14
+ Requires-Python: <3.13,>=3.9.21
15
+ Requires-Dist: kuhl-haus-mdp
16
+ Requires-Dist: websockets
17
+ Requires-Dist: aio-pika
18
+ Requires-Dist: redis[asyncio]
19
+ Requires-Dist: tenacity
20
+ Requires-Dist: fastapi
21
+ Requires-Dist: uvicorn[standard]
22
+ Requires-Dist: pydantic-settings
23
+ Requires-Dist: python-dotenv
24
+ Requires-Dist: massive
25
+ Provides-Extra: testing
26
+ Requires-Dist: setuptools; extra == "testing"
27
+ Requires-Dist: pdm-backend; extra == "testing"
28
+ Requires-Dist: pytest; extra == "testing"
29
+ Requires-Dist: pytest-cov; extra == "testing"
30
+ Description-Content-Type: text/markdown
31
+
32
+
33
+ [![License](https://img.shields.io/github/license/kuhl-haus/kuhl-haus-mdp-servers)](https://github.com/kuhl-haus/kuhl-haus-mdp-servers/blob/mainline/LICENSE.txt)
34
+ [![PyPI](https://img.shields.io/pypi/v/kuhl-haus-mdp-servers.svg)](https://pypi.org/project/kuhl-haus-mdp-servers/)
35
+ [![release](https://img.shields.io/github/v/release/kuhl-haus/kuhl-haus-mdp-servers?style=flat-square)](https://github.com/kuhl-haus/kuhl-haus-mdp-servers/releases)
36
+ [![Build Status](https://github.com/kuhl-haus/kuhl-haus-mdp-servers/actions/workflows/build-images.yml/badge.svg)](https://github.com/kuhl-haus/kuhl-haus-mdp-servers/actions/workflows/build-images.yml)
37
+ [![CodeQL Advanced](https://github.com/kuhl-haus/kuhl-haus-mdp-servers/actions/workflows/codeql.yml/badge.svg)](https://github.com/kuhl-haus/kuhl-haus-mdp-servers/actions/workflows/codeql.yml)
38
+ [![Downloads](https://static.pepy.tech/badge/kuhl-haus-mdp-servers/month)](https://pepy.tech/project/kuhl-haus-mdp-servers)
39
+ [![GitHub last commit](https://img.shields.io/github/last-commit/kuhl-haus/kuhl-haus-mdp-servers)](https://github.com/kuhl-haus/kuhl-haus-mdp-servers/branches)
40
+ [![GitHub issues](https://img.shields.io/github/issues/kuhl-haus/kuhl-haus-mdp-servers)](https://github.com/kuhl-haus/kuhl-haus-mdp-servers/issues)
41
+ [![GitHub pull requests](https://img.shields.io/github/issues-pr/kuhl-haus/kuhl-haus-mdp-servers)](https://github.com/kuhl-haus/kuhl-haus-mdp-servers/pulls)
42
+
43
+
44
+
45
+ # kuhl-haus-mdp-servers
46
+ Container image build repository for market data processing servers
47
+
48
+ ## TL;DR
49
+ Non-business Massive (AKA Polygon.IO) accounts are limited to a single WebSocket connection per asset class and it has to be fast enough to handle messages in a non-blocking fashion or it'll get disconnected. The market data processing pipeline consists of loosely-coupled market data processing components so that a single WebSocket connection can handle messages fast enough to maintain a reliable connection with the market data provider.
50
+
51
+ Per https://massive.com/docs/websocket/quickstart#connecting-to-the-websocket:
52
+ > *By default, one concurrent WebSocket connection per asset class is allowed. If you require multiple simultaneous connections for the same asset class, please [contact support](https://massive.com/contact).*
53
+
54
+ # Components Summary
55
+
56
+ Non-business Massive (AKA Polygon.IO) accounts are limited to a single WebSocket connection per asset class and it has to be fast enough to handle messages in a non-blocking fashion or it'll get disconnected. The Market Data Listener (MDL) connects to the Market Data Source (Massive) and subscribes to unfiltered feeds. MDL inspects the message type for selecting the appropriate serialization method and destination Market Data Queue (MDQ). The Market Data Processors (MDP) subscribe to raw market data in the MDQ and perform the heavy lifting that would otherwise constrain the message handling speed of the MDL. This decoupling allows the MDP and MDL to scale independently. Post-processed market data is stored in the MDC for consumption by the Widget Data Service (WDS). Client-side widgets receive market data from the WDS, which provides a WebSocket interface to MDC pub/sub streams and cached data.
57
+
58
+ ![Market Data Processing C4 diagram](docs/Market_Data_Processing_C4.png)
59
+
60
+ # Component Descriptions
61
+
62
+ ## Market Data Listener (MDL)
63
+ The MDL performs minimal processing on the messages. MDL inspects the message type for selecting the appropriate serialization method and destination queue. MDL implementations may vary as new MDS become available (for example, news).
64
+
65
+ MDL runs as a container and scales independently of other components. The MDL should not be accessible outside the data plane local network.
66
+
67
+ ## Market Data Queues (MDQ)
68
+
69
+ **Purpose:** Buffer high-velocity market data stream for server-side processing with aggressive freshness controls
70
+ - **Queue Type:** FIFO with TTL (5-second max message age)
71
+ - **Cleanup Strategy:** Discarded when TTL expires
72
+ - **Message Format:** Timestamped JSON preserving original Massive.com structure
73
+ - **Durability:** Non-persistent messages (speed over reliability for real-time data)
74
+ - **Independence:** Queues operate completely independently - one queue per subscription
75
+ - **Technology**: RabbitMQ
76
+
77
+ The MDQ should not be accessible outside the data plane local network.
78
+
79
+ ## Market Data Processors (MDP)
80
+ The purpose of the MDP is to process raw real-time market data and delegate processing to data-specific handlers. This separation of concerns allows MDPs to handle any type of data and simplifies horizontal scaling. The MDP stores its processed results in the Market Data Cache (MDC).
81
+
82
+ The MDP:
83
+ - Hydrates the in-memory cache on MDC
84
+ - Processes market data
85
+ - Publishes messages to pub/sub channels
86
+ - Maintains cache entries in MDC
87
+
88
+ MDPs run as containers and scale independently of other components. The MDPs should not be accessible outside the data plane local network.
89
+
90
+ ## Market Data Cache (MDC)
91
+
92
+ **Purpose:** In-memory data store for serialized processed market data.
93
+ * **Cache Type**: In-memory persistent or with TTL
94
+ - **Queue Type:** pub/sub
95
+ - **Technology**: Redis
96
+
97
+ The MDC should not be accessible outside the data plane local network.
98
+
99
+ ## Widget Data Service (WDS)
100
+ **Purpose**:
101
+ 1. WebSocket interface provides access to processed market data for client-side code
102
+ 2. Is the network-layer boundary between clients and the data that is available on the data plane
103
+
104
+ WDS runs as a container and scales independently of other components. WDS is the only data plane component that should be exposed to client networks.
105
+
106
+
107
+ ## Service Control Plane (SCP)
108
+ **Purpose**:
109
+ 1. Authentication and authorization
110
+ 2. Serve static and dynamic content via py4web
111
+ 3. Serve SPA to authenticated clients
112
+ 4. Injects authentication token and WDS url into SPA environment for authenticated access to WDS
113
+ 5. Control plane for managing application components at runtime
114
+ 6. API for programmatic access to service controls and instrumentation.
115
+
116
+ The SCP requires access to the data plane network for API access to data plane components.
117
+
@@ -0,0 +1,9 @@
1
+ kuhl_haus/servers/__init__.py,sha256=5dEpAdB3kypH8tCRECoXwbly1WV9kFU5kh8ldGSa0VI,349
2
+ kuhl_haus/servers/mdl_server.py,sha256=iSkP7GANluStDeDqFJhL4IOAQNo94jUiX9HbAnmuazc,10273
3
+ kuhl_haus/servers/mdp_server.py,sha256=sUeZOmSIjzPHIiYmZKlpmIy2Lag4FLZivI9J6XGo-PQ,9017
4
+ kuhl_haus/servers/wds_server.py,sha256=DYLbq8Al67h-p4TzUJYl-RCU61izSUYYNtzvzb5KGyM,6418
5
+ kuhl_haus_mdp_servers-0.1.5.dist-info/METADATA,sha256=S3gIGsy_nnyite8-r5ECn4a4cVulg2mS6RQ_q4d8lUk,7347
6
+ kuhl_haus_mdp_servers-0.1.5.dist-info/WHEEL,sha256=tsUv_t7BDeJeRHaSrczbGeuK-TtDpGsWi_JfpzD255I,90
7
+ kuhl_haus_mdp_servers-0.1.5.dist-info/entry_points.txt,sha256=LPD1rLjALMFQF-BcJTcVMcZRqR89QPSqu05BHC4BBWc,172
8
+ kuhl_haus_mdp_servers-0.1.5.dist-info/licenses/LICENSE.txt,sha256=j5GdO9_Y_RH0lr1tsVAY1rtBJxoyC_-MmqaFNDE3dwo,1066
9
+ kuhl_haus_mdp_servers-0.1.5.dist-info/RECORD,,
@@ -0,0 +1,4 @@
1
+ Wheel-Version: 1.0
2
+ Generator: pdm-backend (2.4.6)
3
+ Root-Is-Purelib: true
4
+ Tag: py3-none-any
@@ -0,0 +1,7 @@
1
+ [console_scripts]
2
+ mdl_server = kuhl_haus.servers.mdl_server:app
3
+ mdp_server = kuhl_haus.servers.mdp_server:app
4
+ wds_server = kuhl_haus.servers.wds_server:app
5
+
6
+ [gui_scripts]
7
+
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2025 Kuhl Haus
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.