kuhl-haus-mdp-servers 0.0.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- kuhl_haus_mdp_servers-0.0.0/PKG-INFO +36 -0
- kuhl_haus_mdp_servers-0.0.0/README.md +15 -0
- kuhl_haus_mdp_servers-0.0.0/pyproject.toml +79 -0
- kuhl_haus_mdp_servers-0.0.0/setup.cfg +28 -0
- kuhl_haus_mdp_servers-0.0.0/setup.py +22 -0
- kuhl_haus_mdp_servers-0.0.0/src/kuhl_haus/servers/__init__.py +10 -0
- kuhl_haus_mdp_servers-0.0.0/src/kuhl_haus/servers/mdl_server.py +258 -0
- kuhl_haus_mdp_servers-0.0.0/src/kuhl_haus/servers/mdp_server.py +245 -0
- kuhl_haus_mdp_servers-0.0.0/src/kuhl_haus/servers/wds_server.py +200 -0
- kuhl_haus_mdp_servers-0.0.0/src/kuhl_haus_mdp_servers.egg-info/PKG-INFO +36 -0
- kuhl_haus_mdp_servers-0.0.0/src/kuhl_haus_mdp_servers.egg-info/SOURCES.txt +15 -0
- kuhl_haus_mdp_servers-0.0.0/src/kuhl_haus_mdp_servers.egg-info/dependency_links.txt +1 -0
- kuhl_haus_mdp_servers-0.0.0/src/kuhl_haus_mdp_servers.egg-info/entry_points.txt +4 -0
- kuhl_haus_mdp_servers-0.0.0/src/kuhl_haus_mdp_servers.egg-info/requires.txt +7 -0
- kuhl_haus_mdp_servers-0.0.0/src/kuhl_haus_mdp_servers.egg-info/top_level.txt +1 -0
- kuhl_haus_mdp_servers-0.0.0/src/kuhl_haus_mdp_servers.egg-info/zip-safe +1 -0
|
@@ -0,0 +1,36 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: kuhl-haus-mdp-servers
|
|
3
|
+
Version: 0.0.0
|
|
4
|
+
Summary: Container image build repository for market data processing servers
|
|
5
|
+
Author-email: Tom Pounders <git@oldschool.engineer>
|
|
6
|
+
Project-URL: Homepage, https://github.com/kuhl-haus/kuhl-haus-mdp-servers
|
|
7
|
+
Project-URL: Documentation, https://github.com/kuhl-haus/kuhl-haus-mdp-servers/wiki
|
|
8
|
+
Project-URL: Source, https://github.com/kuhl-haus/kuhl-haus-mdp-servers.git
|
|
9
|
+
Project-URL: Changelog, https://github.com/kuhl-haus/kuhl-haus-mdp-servers/commits
|
|
10
|
+
Project-URL: Tracker, https://github.com/kuhl-haus/kuhl-haus-mdp-servers/issues
|
|
11
|
+
Classifier: Development Status :: 4 - Beta
|
|
12
|
+
Classifier: Programming Language :: Python
|
|
13
|
+
Requires-Python: <3.13,>=3.9.21
|
|
14
|
+
Description-Content-Type: text/markdown
|
|
15
|
+
Requires-Dist: kuhl-haus-mdp
|
|
16
|
+
Provides-Extra: testing
|
|
17
|
+
Requires-Dist: setuptools; extra == "testing"
|
|
18
|
+
Requires-Dist: pdm-backend; extra == "testing"
|
|
19
|
+
Requires-Dist: pytest; extra == "testing"
|
|
20
|
+
Requires-Dist: pytest-cov; extra == "testing"
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
[](https://github.com/kuhl-haus/kuhl-haus-mdp-servers/blob/mainline/LICENSE.txt)
|
|
24
|
+
[](https://pypi.org/project/kuhl-haus-mdp-servers/)
|
|
25
|
+
[](https://github.com/kuhl-haus/kuhl-haus-mdp-servers/releases)
|
|
26
|
+
[](https://github.com/kuhl-haus/kuhl-haus-mdp-servers/actions/workflows/build-images.yml)
|
|
27
|
+
[](https://github.com/kuhl-haus/kuhl-haus-mdp-servers/actions/workflows/codeql.yml)
|
|
28
|
+
[](https://pepy.tech/project/kuhl-haus-mdp-servers)
|
|
29
|
+
[](https://github.com/kuhl-haus/kuhl-haus-mdp-servers/branches)
|
|
30
|
+
[](https://github.com/kuhl-haus/kuhl-haus-mdp-servers/issues)
|
|
31
|
+
[](https://github.com/kuhl-haus/kuhl-haus-mdp-servers/pulls)
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
# kuhl-haus-mdp-servers
|
|
36
|
+
Container image build repository for market data processing servers
|
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
|
|
2
|
+
[](https://github.com/kuhl-haus/kuhl-haus-mdp-servers/blob/mainline/LICENSE.txt)
|
|
3
|
+
[](https://pypi.org/project/kuhl-haus-mdp-servers/)
|
|
4
|
+
[](https://github.com/kuhl-haus/kuhl-haus-mdp-servers/releases)
|
|
5
|
+
[](https://github.com/kuhl-haus/kuhl-haus-mdp-servers/actions/workflows/build-images.yml)
|
|
6
|
+
[](https://github.com/kuhl-haus/kuhl-haus-mdp-servers/actions/workflows/codeql.yml)
|
|
7
|
+
[](https://pepy.tech/project/kuhl-haus-mdp-servers)
|
|
8
|
+
[](https://github.com/kuhl-haus/kuhl-haus-mdp-servers/branches)
|
|
9
|
+
[](https://github.com/kuhl-haus/kuhl-haus-mdp-servers/issues)
|
|
10
|
+
[](https://github.com/kuhl-haus/kuhl-haus-mdp-servers/pulls)
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
# kuhl-haus-mdp-servers
|
|
15
|
+
Container image build repository for market data processing servers
|
|
@@ -0,0 +1,79 @@
|
|
|
1
|
+
[build-system]
|
|
2
|
+
requires = ["pdm-backend", "setuptools>=61.0"]
|
|
3
|
+
build-backend = "setuptools.build_meta"
|
|
4
|
+
|
|
5
|
+
[project]
|
|
6
|
+
name = "kuhl-haus-mdp-servers"
|
|
7
|
+
dynamic = ["version"]
|
|
8
|
+
description = "Container image build repository for market data processing servers"
|
|
9
|
+
authors = [
|
|
10
|
+
{ name = "Tom Pounders", email = "git@oldschool.engineer" },
|
|
11
|
+
]
|
|
12
|
+
readme = "README.md"
|
|
13
|
+
requires-python = "<3.13,>=3.9.21"
|
|
14
|
+
license-files = ["LICENSE.txt"]
|
|
15
|
+
classifiers = [
|
|
16
|
+
"Development Status :: 4 - Beta",
|
|
17
|
+
"Programming Language :: Python"
|
|
18
|
+
]
|
|
19
|
+
dependencies = [
|
|
20
|
+
"kuhl-haus-mdp",
|
|
21
|
+
]
|
|
22
|
+
|
|
23
|
+
[project.urls]
|
|
24
|
+
Homepage = "https://github.com/kuhl-haus/kuhl-haus-mdp-servers"
|
|
25
|
+
Documentation = "https://github.com/kuhl-haus/kuhl-haus-mdp-servers/wiki"
|
|
26
|
+
Source = "https://github.com/kuhl-haus/kuhl-haus-mdp-servers.git"
|
|
27
|
+
Changelog = "https://github.com/kuhl-haus/kuhl-haus-mdp-servers/commits"
|
|
28
|
+
Tracker = "https://github.com/kuhl-haus/kuhl-haus-mdp-servers/issues"
|
|
29
|
+
|
|
30
|
+
[project.optional-dependencies]
|
|
31
|
+
testing = [
|
|
32
|
+
"setuptools",
|
|
33
|
+
"pdm-backend",
|
|
34
|
+
"pytest",
|
|
35
|
+
"pytest-cov",
|
|
36
|
+
]
|
|
37
|
+
|
|
38
|
+
[tool.setuptools_scm]
|
|
39
|
+
version_scheme = "guess-next-dev"
|
|
40
|
+
local_scheme = "no-local-version"
|
|
41
|
+
|
|
42
|
+
[project.scripts]
|
|
43
|
+
mdl_server = "kuhl_haus.servers.mdl_server:app"
|
|
44
|
+
mdp_server = "kuhl_haus.servers.mdp_server:app"
|
|
45
|
+
wds_server = "kuhl_haus.servers.wds_server:app"
|
|
46
|
+
|
|
47
|
+
[tool.setuptools]
|
|
48
|
+
zip-safe = true
|
|
49
|
+
include-package-data = true
|
|
50
|
+
package-dir = {"" = "src"}
|
|
51
|
+
packages = {find = {where = ["src"], exclude = ["tests"]}}
|
|
52
|
+
|
|
53
|
+
[tool.pytest.ini_options]
|
|
54
|
+
addopts = ["--verbose"]
|
|
55
|
+
norecursedirs = ["dist", "build", ".tox"]
|
|
56
|
+
testpaths = ["tests"]
|
|
57
|
+
|
|
58
|
+
[tool.devpi.upload]
|
|
59
|
+
no_vcs = 1
|
|
60
|
+
formats = ["bdist_wheel"]
|
|
61
|
+
|
|
62
|
+
[tool.flake8]
|
|
63
|
+
max_line_length = 88
|
|
64
|
+
extend_ignore = ["E203", "W503"]
|
|
65
|
+
exclude = [".tox", "build", "dist", ".eggs", "docs/conf.py"]
|
|
66
|
+
|
|
67
|
+
[tool.pyscaffold]
|
|
68
|
+
version = "4.6"
|
|
69
|
+
package = "servers"
|
|
70
|
+
extensions = ["namespace"]
|
|
71
|
+
namespace = "kuhl_haus"
|
|
72
|
+
|
|
73
|
+
[tool.pdm]
|
|
74
|
+
distribution = true
|
|
75
|
+
version = { source = "scm" }
|
|
76
|
+
|
|
77
|
+
[tool.pdm.build]
|
|
78
|
+
package-dir = "src"
|
|
79
|
+
includes = ["src/kuhl_haus"]
|
|
@@ -0,0 +1,28 @@
|
|
|
1
|
+
[options]
|
|
2
|
+
package_dir =
|
|
3
|
+
= src
|
|
4
|
+
zip_safe = True
|
|
5
|
+
include_package_data = True
|
|
6
|
+
|
|
7
|
+
[options.packages.find]
|
|
8
|
+
where = src
|
|
9
|
+
exclude =
|
|
10
|
+
tests
|
|
11
|
+
|
|
12
|
+
[options.package_data]
|
|
13
|
+
kuhl_haus.servers =
|
|
14
|
+
py.typed
|
|
15
|
+
|
|
16
|
+
[tool:pytest]
|
|
17
|
+
addopts =
|
|
18
|
+
--verbose
|
|
19
|
+
norecursedirs =
|
|
20
|
+
dist
|
|
21
|
+
build
|
|
22
|
+
.tox
|
|
23
|
+
testpaths = tests
|
|
24
|
+
|
|
25
|
+
[egg_info]
|
|
26
|
+
tag_build =
|
|
27
|
+
tag_date = 0
|
|
28
|
+
|
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Setup file for kuhl-haus-crow.
|
|
3
|
+
Use setup.cfg to configure your project.
|
|
4
|
+
|
|
5
|
+
This file was generated with PyScaffold 4.6.
|
|
6
|
+
PyScaffold helps you to put up the scaffold of your new Python project.
|
|
7
|
+
Learn more under: https://pyscaffold.org/
|
|
8
|
+
"""
|
|
9
|
+
|
|
10
|
+
from setuptools import setup
|
|
11
|
+
|
|
12
|
+
if __name__ == "__main__":
|
|
13
|
+
try:
|
|
14
|
+
setup()
|
|
15
|
+
except: # noqa
|
|
16
|
+
print(
|
|
17
|
+
"\n\nAn error occurred while building the project, "
|
|
18
|
+
"please ensure you have the most updated version of setuptools, "
|
|
19
|
+
"setuptools_scm and wheel with:\n"
|
|
20
|
+
" pip install -U setuptools setuptools_scm wheel\n\n"
|
|
21
|
+
)
|
|
22
|
+
raise
|
|
@@ -0,0 +1,10 @@
|
|
|
1
|
+
"""Expose the installed distribution's version as ``__version__``."""

from importlib_metadata import PackageNotFoundError, version  # pragma: no cover

try:
    # The distribution name differs from the import package name
    # ("kuhl_haus.servers"): looking up version(__name__) always raised
    # PackageNotFoundError, so __version__ was permanently "unknown".
    # Use the actual distribution name from the package metadata instead.
    dist_name = "kuhl-haus-mdp-servers"
    __version__ = version(dist_name)
except PackageNotFoundError:  # pragma: no cover
    # Package is not installed (e.g. running from a source checkout).
    __version__ = "unknown"
finally:
    # Keep the module namespace clean; only __version__ is public.
    del version, PackageNotFoundError
|
|
@@ -0,0 +1,258 @@
|
|
|
1
|
+
import logging
|
|
2
|
+
import os
|
|
3
|
+
from contextlib import asynccontextmanager
|
|
4
|
+
from copy import copy
|
|
5
|
+
from typing import Optional, List, Union
|
|
6
|
+
|
|
7
|
+
from fastapi import FastAPI, Response, status
|
|
8
|
+
from massive.websocket import Feed, Market
|
|
9
|
+
from pydantic_settings import BaseSettings
|
|
10
|
+
|
|
11
|
+
from kuhl_haus.mdp.integ.massive_data_queues import MassiveDataQueues
|
|
12
|
+
from kuhl_haus.mdp.integ.massive_data_listener import MassiveDataListener
|
|
13
|
+
from kuhl_haus.mdp.integ.utils import get_massive_api_key
|
|
14
|
+
|
|
15
|
+
logging.basicConfig(
|
|
16
|
+
level=logging.INFO,
|
|
17
|
+
format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
|
|
18
|
+
)
|
|
19
|
+
logger = logging.getLogger(__name__)
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
class Settings(BaseSettings):
    """Runtime configuration for the Market Data Listener (MDL) server.

    All defaults are evaluated once at import time from environment
    variables. Values read via os.environ.get arrive as strings;
    presumably pydantic coerces them to the annotated types — TODO confirm
    for the bool fields (e.g. MASSIVE_RAW="false" may coerce unexpectedly).
    """
    # TODO: Retrieve Massive client settings from Service Control Plane API call
    # Massive/Polygon.io API Key
    massive_api_key: str = get_massive_api_key()

    # Massive/Polygon.io Subscription Settings
    # The default values can be overridden via environment variable; use the API to manage at runtime.
    feed: Union[str, Feed] = os.environ.get("MASSIVE_FEED", Feed.RealTime)
    market: Union[str, Market] = os.environ.get("MASSIVE_MARKET", Market.Stocks)
    subscriptions: Optional[List[str]] = os.environ.get("MASSIVE_SUBSCRIPTIONS", ["A.*"])

    # Additional Massive/Polygon.io Settings - default values can be overridden via environment variables
    raw: bool = os.environ.get("MASSIVE_RAW", False)
    verbose: bool = os.environ.get("MASSIVE_VERBOSE", False)
    max_reconnects: Optional[int] = os.environ.get("MASSIVE_MAX_RECONNECTS", 5)
    secure: bool = os.environ.get("MASSIVE_SECURE", True)

    # Redis Settings
    redis_url: str = os.environ.get("REDIS_URL", "redis://redis:redis@localhost:6379/0")

    # RabbitMQ Settings
    rabbitmq_url: str = os.environ.get("RABBITMQ_URL", "amqp://crow:crow@localhost:5672/")
    rabbitmq_host: str = os.environ.get("RABBITMQ_API", "http://crow:crow@localhost:15672/api/")
    message_ttl_ms: int = os.environ.get("MARKET_DATA_MESSAGE_TTL", 5000)  # 5 seconds in milliseconds

    # Server Settings
    # NOTE(review): server_ip/server_port are defined but the __main__ block
    # below hard-codes host/port — confirm which is authoritative.
    server_ip: str = os.environ.get("SERVER_IP", "0.0.0.0")
    server_port: int = os.environ.get("SERVER_PORT", 4200)
    log_level: str = os.environ.get("LOG_LEVEL", "INFO").upper()
    container_image: str = os.environ.get("CONTAINER_IMAGE", "Unknown")
    image_version: str = os.environ.get("IMAGE_VERSION", "Unknown")
    auto_start: bool = os.environ.get("MARKET_DATA_LISTENER_AUTO_START_ENABLED", False)
|
|
54
|
+
|
|
55
|
+
|
|
56
|
+
# Instantiated once at import time; endpoints below mutate it at runtime.
settings = Settings()

# Global state
# Both are created in lifespan() during application startup; they remain
# None until then, so endpoints assume the app has started.
massive_data_queues: Optional[MassiveDataQueues] = None
massive_data_listener: Optional[MassiveDataListener] = None
|
|
61
|
+
|
|
62
|
+
|
|
63
|
+
@asynccontextmanager
async def lifespan(app: FastAPI):
    """Startup and shutdown events"""

    # Startup
    logger.info("Instantiating Market Data Listener...")
    global massive_data_listener, massive_data_queues

    # Queues must exist first: the listener's message handler publishes
    # into them.
    massive_data_queues = MassiveDataQueues(
        logger=logger,
        rabbitmq_url=settings.rabbitmq_url,
        message_ttl=settings.message_ttl_ms,
    )
    await massive_data_queues.setup_queues()

    massive_data_listener = MassiveDataListener(
        logger=logger,
        message_handler=massive_data_queues.handle_messages,
        api_key=settings.massive_api_key,
        feed=settings.feed,
        market=settings.market,
        raw=settings.raw,
        verbose=settings.verbose,
        subscriptions=settings.subscriptions,
        max_reconnects=settings.max_reconnects,
        secure=settings.secure,
    )
    logger.info("Market Data Listener is ready.")
    # NOTE: AUTO-START FEATURE IS DISABLED BY DEFAULT.
    # Non-business licenses are limited to a single WebSocket connection for the entire account.
    # The stop, start, and restart API functionality enables manual control of the WebSocket connection.
    #
    # To enable auto-start, set the environment variable MARKET_DATA_LISTENER_AUTO_START_ENABLED=true.
    if settings.auto_start:
        logger.info("[AUTO-START ENABLED]Starting Market Data Listener...")
        await massive_data_listener.start()

    # Application serves requests while suspended here.
    yield

    # Shutdown: stop the WebSocket client before tearing down the queues
    # it publishes to. stop_websocket_client is the /stop endpoint handler
    # defined later in this module.
    logger.info("Shutting down WebSocket sidecar...")
    await stop_websocket_client()
    await massive_data_queues.shutdown()
|
|
106
|
+
|
|
107
|
+
# FastAPI application; startup/shutdown handled by the lifespan context
# manager above.
app = FastAPI(
    title="Market Data Listener",
    description="Connects to market data provider and publishes to event-specific queues",
    lifespan=lifespan,
)
|
|
112
|
+
|
|
113
|
+
|
|
114
|
+
@app.post("/feed")
|
|
115
|
+
async def feed(feed_str: str):
|
|
116
|
+
"""Update Massive/Polygon.io feeds"""
|
|
117
|
+
original_feed = copy(settings.feed)
|
|
118
|
+
logger.info(f"Original feed: {original_feed}")
|
|
119
|
+
try:
|
|
120
|
+
if feed_str == Feed.RealTime.value:
|
|
121
|
+
logger.info(f"Setting feed to: {repr(Feed.RealTime)}")
|
|
122
|
+
settings.feed = Feed.RealTime
|
|
123
|
+
massive_data_listener.feed = Feed.RealTime
|
|
124
|
+
elif feed_str == Feed.Delayed.value:
|
|
125
|
+
logger.info(f"Setting feed to: {repr(Feed.Delayed)}")
|
|
126
|
+
settings.feed = Feed.Delayed
|
|
127
|
+
massive_data_listener.feed = Feed.Delayed
|
|
128
|
+
else:
|
|
129
|
+
raise ValueError(f"Invalid feed: {feed_str}")
|
|
130
|
+
except Exception as e:
|
|
131
|
+
logger.error(f"Error setting feed: {e}")
|
|
132
|
+
logger.error(f"Restoring feed to: {original_feed}")
|
|
133
|
+
settings.feed = original_feed
|
|
134
|
+
massive_data_listener.feed = original_feed
|
|
135
|
+
logger.error(f"Current feed: {settings.feed}")
|
|
136
|
+
logger.error("Rollback complete")
|
|
137
|
+
|
|
138
|
+
|
|
139
|
+
@app.post("/market")
|
|
140
|
+
async def market(market_str: str):
|
|
141
|
+
"""Update Massive/Polygon.io market"""
|
|
142
|
+
original_market = copy(settings.market)
|
|
143
|
+
logger.info(f"Original market: {original_market}")
|
|
144
|
+
try:
|
|
145
|
+
if market_str == Market.Stocks.value:
|
|
146
|
+
logger.info(f"Setting market to: {repr(Market.Stocks)}")
|
|
147
|
+
settings.market = Market.Stocks
|
|
148
|
+
massive_data_listener.market = Market.Stocks
|
|
149
|
+
elif market_str == Market.Options.value:
|
|
150
|
+
logger.info(f"Setting market to: {repr(Market.Options)}")
|
|
151
|
+
settings.market = Market.Options
|
|
152
|
+
massive_data_listener.market = Market.Options
|
|
153
|
+
elif market_str == Market.Indices.value:
|
|
154
|
+
logger.info(f"Setting market to: {repr(Market.Indices)}")
|
|
155
|
+
settings.market = Market.Indices
|
|
156
|
+
massive_data_listener.market = Market.Indices
|
|
157
|
+
else:
|
|
158
|
+
raise ValueError(f"Invalid market: {market_str}")
|
|
159
|
+
except Exception as e:
|
|
160
|
+
logger.error(f"Error setting market: {e}")
|
|
161
|
+
logger.error(f"Restoring market to: {original_market}")
|
|
162
|
+
settings.market = original_market
|
|
163
|
+
massive_data_listener.market = original_market
|
|
164
|
+
logger.error(f"Current market: {settings.market}")
|
|
165
|
+
logger.error("Rollback complete")
|
|
166
|
+
|
|
167
|
+
|
|
168
|
+
@app.post("/subscriptions")
|
|
169
|
+
async def subscriptions(subscriptions_list: List[str]):
|
|
170
|
+
"""Update Massive/Polygon.io subscriptions"""
|
|
171
|
+
original_subscriptions = copy(settings.subscriptions)
|
|
172
|
+
logger.info(f"Original subscriptions: {original_subscriptions}")
|
|
173
|
+
try:
|
|
174
|
+
settings.subscriptions = []
|
|
175
|
+
for sub in subscriptions_list:
|
|
176
|
+
# Only add subscriptions that start with one of the following prefixes:
|
|
177
|
+
# "A.*", "AM.*", "T.*", "Q.*", "LULD.*"
|
|
178
|
+
if (sub.startswith("A.") or
|
|
179
|
+
sub.startswith("AM.") or
|
|
180
|
+
sub.startswith("T.") or
|
|
181
|
+
sub.startswith("Q.") or
|
|
182
|
+
sub.startswith("LULD.")):
|
|
183
|
+
logger.info(f"Adding subscription: {sub}")
|
|
184
|
+
settings.subscriptions.append(sub)
|
|
185
|
+
massive_data_listener.subscriptions = settings.subscriptions
|
|
186
|
+
logger.info(f"Current subscriptions: {settings.subscriptions}")
|
|
187
|
+
except Exception as e:
|
|
188
|
+
logger.error(f"Error setting subscriptions: {e}")
|
|
189
|
+
logger.error(f"Restoring subscriptions to: {original_subscriptions}")
|
|
190
|
+
settings.subscriptions = original_subscriptions
|
|
191
|
+
massive_data_listener.subscriptions = original_subscriptions
|
|
192
|
+
logger.error(f"Current subscriptions: {settings.subscriptions}")
|
|
193
|
+
logger.error("Rollback complete")
|
|
194
|
+
|
|
195
|
+
|
|
196
|
+
@app.get("/start")
|
|
197
|
+
async def start_websocket_client():
|
|
198
|
+
logger.info("Starting Market Data Listener...")
|
|
199
|
+
await massive_data_listener.start()
|
|
200
|
+
|
|
201
|
+
|
|
202
|
+
@app.get("/stop")
|
|
203
|
+
async def stop_websocket_client():
|
|
204
|
+
logger.info("Stopping Market Data Listener...")
|
|
205
|
+
await massive_data_listener.stop()
|
|
206
|
+
|
|
207
|
+
|
|
208
|
+
@app.get("/restart")
|
|
209
|
+
async def restart_websocket_client():
|
|
210
|
+
logger.info("Restarting Market Data Listener...")
|
|
211
|
+
await massive_data_listener.restart()
|
|
212
|
+
|
|
213
|
+
|
|
214
|
+
@app.get("/")
|
|
215
|
+
async def root():
|
|
216
|
+
if massive_data_queues.connection_status["connected"] and massive_data_listener.connection_status["connected"]:
|
|
217
|
+
ret = "Running"
|
|
218
|
+
elif massive_data_queues.connection_status["connected"]:
|
|
219
|
+
ret = "Idle"
|
|
220
|
+
else:
|
|
221
|
+
ret = "Unhealthy"
|
|
222
|
+
return {
|
|
223
|
+
"service": "Market Data Listener",
|
|
224
|
+
"status": ret,
|
|
225
|
+
"auto-start": settings.auto_start,
|
|
226
|
+
"container_image": settings.container_image,
|
|
227
|
+
"image_version": settings.image_version,
|
|
228
|
+
"mdq_connection_status": massive_data_queues.connection_status,
|
|
229
|
+
"mdl_connection_status": massive_data_listener.connection_status
|
|
230
|
+
}
|
|
231
|
+
|
|
232
|
+
|
|
233
|
+
@app.get("/health", status_code=200)
|
|
234
|
+
async def health_check(response: Response):
|
|
235
|
+
"""Health check endpoint"""
|
|
236
|
+
# The server should be connected to MDQ even when the WebSocket client is not running.
|
|
237
|
+
status_message = "OK"
|
|
238
|
+
if not massive_data_queues.connection_status["connected"]:
|
|
239
|
+
status_message = "Unhealthy"
|
|
240
|
+
response.status_code = status.HTTP_503_SERVICE_UNAVAILABLE
|
|
241
|
+
# TODO: Investigate if this caused health check failures in production during off-hours.
|
|
242
|
+
# if settings.auto_start and not massive_data_listener.connection_status["connected"]:
|
|
243
|
+
# status_message = "Unhealthy"
|
|
244
|
+
# response.status_code = status.HTTP_503_SERVICE_UNAVAILABLE
|
|
245
|
+
return {
|
|
246
|
+
"service": "Market Data Listener",
|
|
247
|
+
"status": status_message,
|
|
248
|
+
"auto-start": settings.auto_start,
|
|
249
|
+
"container_image": settings.container_image,
|
|
250
|
+
"image_version": settings.image_version,
|
|
251
|
+
"mdq_connection_status": massive_data_queues.connection_status,
|
|
252
|
+
"mdl_connection_status": massive_data_listener.connection_status
|
|
253
|
+
}
|
|
254
|
+
|
|
255
|
+
|
|
256
|
+
if __name__ == "__main__":
|
|
257
|
+
import uvicorn
|
|
258
|
+
uvicorn.run(app, host="0.0.0.0", port=4200)
|
|
@@ -0,0 +1,245 @@
|
|
|
1
|
+
import asyncio
|
|
2
|
+
import logging
|
|
3
|
+
import os
|
|
4
|
+
from contextlib import asynccontextmanager
|
|
5
|
+
from typing import Dict, Union
|
|
6
|
+
|
|
7
|
+
# import redis.asyncio as aioredis
|
|
8
|
+
from fastapi import FastAPI, Response, status
|
|
9
|
+
from fastapi.responses import RedirectResponse
|
|
10
|
+
from pydantic_settings import BaseSettings
|
|
11
|
+
|
|
12
|
+
from massive.rest import RESTClient
|
|
13
|
+
|
|
14
|
+
from kuhl_haus.mdp.analyzers.top_stocks import TopStocksAnalyzer
|
|
15
|
+
from kuhl_haus.mdp.components.market_data_scanner import MarketDataScanner
|
|
16
|
+
# from kuhl_haus.mdp.components.market_data_cache import MarketDataCache
|
|
17
|
+
from kuhl_haus.mdp.models.market_data_cache_keys import MarketDataCacheKeys
|
|
18
|
+
from kuhl_haus.mdp.models.market_data_scanner_names import MarketDataScannerNames
|
|
19
|
+
from kuhl_haus.mdp.models.massive_data_queue import MassiveDataQueue
|
|
20
|
+
from kuhl_haus.mdp.integ.massive_data_processor import MassiveDataProcessor
|
|
21
|
+
from kuhl_haus.mdp.integ.utils import get_massive_api_key
|
|
22
|
+
from kuhl_haus.mdp.helpers.process_manager import ProcessManager
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
class Settings(BaseSettings):
    """Runtime configuration for the Market Data Processor (MDP) server.

    Defaults are read once at import time from environment variables;
    presumably pydantic coerces the string values to the annotated
    types — TODO confirm for int fields.
    """
    # Massive/Polygon.io API Key
    massive_api_key: str = get_massive_api_key()

    # RabbitMQ Settings
    rabbitmq_url: str = os.environ.get("RABBITMQ_URL", "amqp://crow:crow@localhost:5672/")
    rabbitmq_host: str = os.environ.get("RABBITMQ_API", "http://crow:crow@localhost:15672/api/")
    message_ttl_ms: int = os.environ.get("MARKET_DATA_MESSAGE_TTL", 5000)  # 5 seconds in milliseconds

    # Redis Settings
    redis_url: str = os.environ.get("REDIS_URL", "redis://redis:redis@localhost:6379/0")

    # Server Settings
    server_ip: str = os.environ.get("SERVER_IP", "0.0.0.0")
    server_port: int = os.environ.get("SERVER_PORT", 4201)
    log_level: str = os.environ.get("LOG_LEVEL", "INFO").upper()
    container_image: str = os.environ.get("CONTAINER_IMAGE", "Unknown")
    image_version: str = os.environ.get("IMAGE_VERSION", "Unknown")
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
settings = Settings()

logging.basicConfig(
    level=settings.log_level,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
)
logger = logging.getLogger(__name__)


# Global state
# NOTE(review): market_data_scanners and massive_data_processors are
# never populated in this module (lifespan starts workers via the
# ProcessManager instead), so loops over them in the endpoints below
# appear to be no-ops / legacy — confirm before relying on them.
# market_data_cache: MarketDataCache = None
market_data_scanners: Dict[str, MarketDataScanner] = {}
massive_data_processors: Dict[str, MassiveDataProcessor] = {}
# Queue names the lifespan handler spawns one processor worker for.
massive_data_queues = [
    MassiveDataQueue.TRADES.value,
    MassiveDataQueue.AGGREGATE.value,
    MassiveDataQueue.QUOTES.value,
    MassiveDataQueue.HALTS.value,
    MassiveDataQueue.NEWS.value,
    MassiveDataQueue.UNKNOWN.value,
]

# Global process manager
# Created in lifespan() during startup; None until then.
process_manager: ProcessManager = None
|
|
69
|
+
|
|
70
|
+
|
|
71
|
+
@asynccontextmanager
async def lifespan(app: FastAPI):
    """Startup and shutdown events"""
    global process_manager

    logger.info("Starting Market Data Processor...")
    process_manager = ProcessManager()

    # Start MassiveDataProcessors in separate processes
    # (one worker per queue name; workers are tracked by the
    # process manager under the "mdp_<queue>" name).
    for queue in massive_data_queues:
        process_manager.start_worker(
            name=f"mdp_{queue}",
            worker_class=MassiveDataProcessor,
            rabbitmq_url=settings.rabbitmq_url,
            queue_name=queue,
            redis_url=settings.redis_url,
        )
    # # Market Data Cache
    # redis_client = aioredis.from_url(
    #     settings.redis_url,
    #     encoding="utf-8",
    #     decode_responses=True,
    #     max_connections=1000,
    #     socket_connect_timeout=10,  # Add timeout
    # )
    # market_data_cache = MarketDataCache(redis_client=redis_client)
    #
    # # TODO: Create a component to fetch company information from FMP.
    #
    # Start MarketDataScanners in separate processes
    process_manager.start_worker(
        name=f"scanner_{MarketDataScannerNames.TOP_STOCKS.value}",
        worker_class=MarketDataScanner,
        redis_url=settings.redis_url,
        analyzer=TopStocksAnalyzer(
            rest_client=RESTClient(api_key=settings.massive_api_key)
        ),
        subscriptions=[f"{MarketDataCacheKeys.AGGREGATE.value}:*"]
    )

    logger.info("Market Data Processor is running.")

    # Application serves requests while suspended here.
    yield

    # Shutdown
    logger.info("Shutting down Market Data Processor...")
    process_manager.stop_all(timeout=15.0)
    logger.info("Market Data Processor is stopped.")
|
|
119
|
+
|
|
120
|
+
|
|
121
|
+
# FastAPI application; worker lifecycle handled by lifespan() above.
app = FastAPI(
    title="Market Data Processor",
    description="The MDP is responsible for the heavy lifting which would otherwise constrain the message handling speed of the MDL.",
    lifespan=lifespan,
)
|
|
126
|
+
|
|
127
|
+
|
|
128
|
+
@app.get("/")
|
|
129
|
+
async def root():
|
|
130
|
+
# return redirect to health_check
|
|
131
|
+
return RedirectResponse(url="/health")
|
|
132
|
+
|
|
133
|
+
|
|
134
|
+
@app.get("/start")
|
|
135
|
+
async def start_scanners():
|
|
136
|
+
# Start all massive data processors
|
|
137
|
+
logger.info("Starting Massive Data Processors...")
|
|
138
|
+
for processor in massive_data_processors.values():
|
|
139
|
+
asyncio.create_task(processor.start())
|
|
140
|
+
logger.info("Massive Data Processors started successfully.")
|
|
141
|
+
|
|
142
|
+
logger.info("Starting Market Data Scanners...")
|
|
143
|
+
for k in market_data_scanners.keys():
|
|
144
|
+
logger.info(f"Starting {k}...")
|
|
145
|
+
await market_data_scanners[k].start()
|
|
146
|
+
logger.info(f"{k} started successfully.")
|
|
147
|
+
logger.info("Market Data Scanners started successfully.")
|
|
148
|
+
|
|
149
|
+
|
|
150
|
+
@app.post("/start_scanner")
|
|
151
|
+
async def start_scanner(scanner_name: str):
|
|
152
|
+
if scanner_name not in market_data_scanners.keys():
|
|
153
|
+
return {"status": "error", "message": f"Scanner {scanner_name} not found."}
|
|
154
|
+
logger.info(f"Starting {scanner_name}...")
|
|
155
|
+
await market_data_scanners[scanner_name].start()
|
|
156
|
+
logger.info(f"Started {scanner_name} successfully.")
|
|
157
|
+
return {"status": "success", "message": f"{scanner_name} started successfully."}
|
|
158
|
+
|
|
159
|
+
|
|
160
|
+
@app.get("/stop")
|
|
161
|
+
async def stop_scanners():
|
|
162
|
+
logger.info("Shutting down Massive Data Processors...")
|
|
163
|
+
for queue in massive_data_queues:
|
|
164
|
+
logger.info(f"Stopping {queue}...")
|
|
165
|
+
await massive_data_processors[queue].stop()
|
|
166
|
+
logger.info(f"{queue} stopped successfully.")
|
|
167
|
+
logger.info("Massive Data Processors stopped successfully.")
|
|
168
|
+
logger.info("Shutting down Market Data Scanners...")
|
|
169
|
+
for k in market_data_scanners.keys():
|
|
170
|
+
logger.info(f"Stopping {k}...")
|
|
171
|
+
await market_data_scanners[k].stop()
|
|
172
|
+
logger.info(f"{k} stopped successfully.")
|
|
173
|
+
logger.info("Market Data Scanners stopped successfully.")
|
|
174
|
+
|
|
175
|
+
|
|
176
|
+
@app.post("/stop_scanner")
|
|
177
|
+
async def stop_scanner(scanner_name: str):
|
|
178
|
+
if scanner_name not in market_data_scanners.keys():
|
|
179
|
+
return {"status": "error", "message": f"Scanner {scanner_name} not found."}
|
|
180
|
+
logger.info(f"Stopping {scanner_name}...")
|
|
181
|
+
await market_data_scanners[scanner_name].stop()
|
|
182
|
+
logger.info(f"Stopped {scanner_name} successfully.")
|
|
183
|
+
return {"status": "success", "message": f"{scanner_name} stopped successfully."}
|
|
184
|
+
|
|
185
|
+
|
|
186
|
+
@app.get("/restart")
|
|
187
|
+
async def restart_scanners():
|
|
188
|
+
logger.info("Restarting Massive Data Processors...")
|
|
189
|
+
for queue in massive_data_queues:
|
|
190
|
+
logger.info(f"Stopping {queue}...")
|
|
191
|
+
await massive_data_processors[queue].stop()
|
|
192
|
+
logger.info(f"{queue} stopped successfully.")
|
|
193
|
+
logger.info("Starting Massive Data Processors...")
|
|
194
|
+
for processor in massive_data_processors.values():
|
|
195
|
+
asyncio.create_task(processor.start())
|
|
196
|
+
logger.info("Massive Data Processors restarted successfully.")
|
|
197
|
+
|
|
198
|
+
logger.info("Restarting Market Data Scanners...")
|
|
199
|
+
for k in market_data_scanners.keys():
|
|
200
|
+
logger.info(f"Restarting {k}...")
|
|
201
|
+
await market_data_scanners[k].restart()
|
|
202
|
+
logger.info(f"{k} restarted successfully.")
|
|
203
|
+
logger.info("Restarting Market Data Scanners restarted successfully.")
|
|
204
|
+
|
|
205
|
+
|
|
206
|
+
@app.post("/restart_scanner")
|
|
207
|
+
async def restart_scanner(scanner_name: str):
|
|
208
|
+
if scanner_name not in market_data_scanners.keys():
|
|
209
|
+
return {"status": "error", "message": f"Scanner {scanner_name} not found."}
|
|
210
|
+
logger.info(f"Restarting {scanner_name}...")
|
|
211
|
+
await market_data_scanners[scanner_name].restart()
|
|
212
|
+
logger.info(f"Restarted {scanner_name} successfully.")
|
|
213
|
+
return {"status": "success", "message": f"{scanner_name} restarted successfully."}
|
|
214
|
+
|
|
215
|
+
|
|
216
|
+
@app.get("/health", status_code=200)
|
|
217
|
+
async def health_check(response: Response):
|
|
218
|
+
"""Health check endpoint - always responsive"""
|
|
219
|
+
try:
|
|
220
|
+
ret: dict[str, Union[str, dict]] = {
|
|
221
|
+
"status": "OK",
|
|
222
|
+
"container_image": settings.container_image,
|
|
223
|
+
"image_version": settings.image_version,
|
|
224
|
+
}
|
|
225
|
+
|
|
226
|
+
# Non-blocking status collection
|
|
227
|
+
for queue in massive_data_queues:
|
|
228
|
+
name = f"mdp_{queue}"
|
|
229
|
+
ret[name] = process_manager.get_status(name)
|
|
230
|
+
|
|
231
|
+
for scanner_name in [MarketDataScannerNames.TOP_STOCKS.value]:
|
|
232
|
+
name = f"scanner_{scanner_name}"
|
|
233
|
+
ret[name] = process_manager.get_status(name)
|
|
234
|
+
|
|
235
|
+
return ret
|
|
236
|
+
|
|
237
|
+
except Exception as e:
|
|
238
|
+
logger.error(f"Health check error: {e}")
|
|
239
|
+
response.status_code = status.HTTP_503_SERVICE_UNAVAILABLE
|
|
240
|
+
return {"status": "ERROR", "message": str(e)}
|
|
241
|
+
|
|
242
|
+
|
|
243
|
+
if __name__ == "__main__":
|
|
244
|
+
import uvicorn
|
|
245
|
+
uvicorn.run(app, host="0.0.0.0", port=4201)
|
|
@@ -0,0 +1,200 @@
|
|
|
1
|
+
import asyncio
|
|
2
|
+
import json
|
|
3
|
+
import logging
|
|
4
|
+
import os
|
|
5
|
+
from contextlib import asynccontextmanager
|
|
6
|
+
from typing import Set
|
|
7
|
+
|
|
8
|
+
import redis.asyncio as redis
|
|
9
|
+
from fastapi import FastAPI, Response, WebSocket, WebSocketDisconnect, status
|
|
10
|
+
from fastapi.responses import JSONResponse, RedirectResponse
|
|
11
|
+
from kuhl_haus.mdp.components.widget_data_service import WidgetDataService
|
|
12
|
+
from pydantic_settings import BaseSettings
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
class UnauthorizedException(Exception):
    """Raised when a websocket client fails (or skips) authentication."""
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
class Settings(BaseSettings):
    """Runtime configuration sourced from environment variables.

    Values read via ``os.environ.get`` are strings, and pydantic does not
    validate field defaults, so each non-string field is coerced explicitly;
    otherwise setting SERVER_PORT or AUTH_ENABLED in the environment would
    leave a raw string on an ``int``/``bool`` field.
    """

    # Redis Settings
    redis_url: str = os.environ.get("REDIS_URL", "redis://redis:redis@localhost:6379/0")

    # Server Settings
    server_ip: str = os.environ.get("SERVER_IP", "0.0.0.0")
    # int(...) guards against SERVER_PORT arriving as a string from the env.
    server_port: int = int(os.environ.get("SERVER_PORT", 4202))
    log_level: str = os.environ.get("LOG_LEVEL", "INFO").upper()
    container_image: str = os.environ.get("CONTAINER_IMAGE", "Unknown")
    image_version: str = os.environ.get("IMAGE_VERSION", "Unknown")

    # Auth Settings
    # Parse common truthy spellings; any non-empty string (even "false")
    # would otherwise be truthy. Unset env var -> "" -> False, preserving
    # the original default.
    auth_enabled: bool = os.environ.get("AUTH_ENABLED", "").lower() in ("1", "true", "yes", "on")
    auth_api_key: str = os.environ.get("AUTH_API_KEY", "secret")
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
# Instantiate settings once at import time so every handler shares one config.
settings = Settings()
logging.basicConfig(
    level=settings.log_level,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
)
logger = logging.getLogger(__name__)


# Global service instance
# NOTE(review): populated by the lifespan handler on startup; it is None
# until then, so the annotation is effectively Optional[WidgetDataService].
wds_service: WidgetDataService = None
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
@asynccontextmanager
async def lifespan(app: FastAPI):
    """Manage WDS lifecycle.

    Startup: build the Redis client and pub/sub channel, create the
    WidgetDataService, and start it before the app begins serving.
    Shutdown: stop the service and close both Redis handles. Cleanup runs
    in a ``finally`` block so resources are released even if the server
    exits through an exception raised while running.
    """
    global wds_service

    # Startup
    redis_client = redis.from_url(
        settings.redis_url,
        encoding="utf-8",
        decode_responses=True
    )
    pubsub_client = redis_client.pubsub()
    wds_service = WidgetDataService(redis_client=redis_client, pubsub_client=pubsub_client)
    await wds_service.start()

    try:
        yield
    finally:
        # Shutdown — always executed, error or not.
        await wds_service.stop()
        await pubsub_client.close()
        await redis_client.close()
|
|
68
|
+
|
|
69
|
+
|
|
70
|
+
# ASGI application; the lifespan hook above owns WDS startup/shutdown.
app = FastAPI(
    title="Widget Data Service",
    description="WebSocket interface for client market data subscriptions",
    lifespan=lifespan,
)
|
|
75
|
+
|
|
76
|
+
|
|
77
|
+
@app.get("/")
async def root():
    """Send clients hitting the bare root path to the health-check endpoint."""
    return RedirectResponse("/health")
|
|
81
|
+
|
|
82
|
+
|
|
83
|
+
@app.get("/restart")
async def restart_wds_service():
    """Stop and restart the Widget Data Service.

    Returns:
        A status dict, matching the shape of the other restart endpoints
        in this package (previously this handler returned nothing, so the
        client received a bare ``null``).
    """
    logger.info("Restarting Widget Data Service...")
    await wds_service.stop()
    # Brief pause so in-flight tasks can wind down before restarting.
    await asyncio.sleep(1)
    await wds_service.start()
    logger.info("Widget Data Service restarted successfully.")
    return {"status": "success", "message": "Widget Data Service restarted successfully."}
|
|
90
|
+
|
|
91
|
+
|
|
92
|
+
@app.get("/health", status_code=200)
async def health_check(response: Response):
    """Kubernetes health check endpoint.

    Returns a small OK payload with image identity. On failure, responds
    503 with an error body (previously the except branch returned nothing,
    so the client received a bare ``null``).
    """
    try:
        response.status_code = status.HTTP_200_OK
        return JSONResponse({
            "status": "OK",
            "container_image": settings.container_image,
            "image_version": settings.image_version,
        })
    except Exception as e:
        logger.error(f"Fatal error while processing health check: {e}")
        response.status_code = status.HTTP_503_SERVICE_UNAVAILABLE
        # Returning a JSONResponse ignores the injected Response's status,
        # so set the code on the returned response explicitly.
        return JSONResponse(
            {"status": "ERROR", "message": str(e)},
            status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
        )
|
|
105
|
+
|
|
106
|
+
|
|
107
|
+
@app.websocket("/ws")
async def websocket_endpoint(websocket: WebSocket):
    """WebSocket endpoint for feed subscriptions.

    Client protocol:
    Authenticate:{"action": "auth", "api_key": "secret"}
    Subscribe: {"action": "subscribe", "feed": "stocks:luld:*"}
    Unsubscribe: {"action": "unsubscribe", "feed": "stocks:luld:*"}
    Snapshot: {"action": "get", "cache": "stocks:luld:*"}
    """
    await websocket.accept()
    # Connection metadata used only for logging.
    # NOTE(review): assumes websocket.headers.items() is JSON-serializable
    # (Starlette's Headers.items() returns a list of tuples) — confirm.
    client_info = {
        "headers": json.dumps(websocket.headers.items()),
        "host": websocket.client.host,
        "port": websocket.client.port
    }
    logger.info(f"wds.ws.connected client_info:{client_info}")

    # Feeds this client has subscribed to; used for cleanup in `finally`.
    active_feeds: Set[str] = set()
    # When auth is disabled in settings, clients are implicitly authenticated.
    authenticated: bool = not settings.auth_enabled
    try:
        if not authenticated:
            # The first message must be an auth request; anything else
            # closes the socket via the UnauthorizedException path.
            message = await websocket.receive_text()
            data = json.loads(message)
            action = data.get("action")

            if action == "auth":
                api_key = data.get("api_key")
                if api_key == settings.auth_api_key:
                    authenticated = True
                    logger.info(f"wds.ws.authenticated client_info:{client_info}")
                    await websocket.send_json({"status": "authorized"})
                else:
                    # Wrong key: notify, close, then exit through the
                    # exception handler below (skips the main loop).
                    await websocket.send_json({"status": "invalid key"})
                    await websocket.close()
                    raise UnauthorizedException("Invalid API key")
            else:
                await websocket.send_json({"status": "unauthorized"})
                await websocket.close()
                raise UnauthorizedException("Unauthorized")
        # Main request loop: one JSON message per client action.
        while authenticated:
            message = await websocket.receive_text()
            data = json.loads(message)
            action = data.get("action")

            if action == "subscribe":
                feed = data.get("feed")
                if feed:
                    await wds_service.subscribe(feed, websocket)
                    active_feeds.add(feed)
                    await websocket.send_json({"status": "subscribed", "feed": feed})

            elif action == "unsubscribe":
                feed = data.get("feed")
                # Only honor unsubscribes for feeds this socket actually holds.
                if feed and feed in active_feeds:
                    await wds_service.unsubscribe(feed, websocket)
                    active_feeds.remove(feed)
                    await websocket.send_json({"status": "unsubscribed", "feed": feed})

            elif action == "get":
                # One-shot cache snapshot rather than a streaming subscription.
                cache_key = data.get("cache")
                if cache_key:
                    cached_data = await wds_service.get_cache(cache_key)
                    await websocket.send_json({
                        "cache": cache_key,
                        "data": cached_data
                    })
            else:
                await websocket.send_json({"status": "invalid action"})

    except WebSocketDisconnect:
        # Normal client disconnect: log and detach the socket from the service.
        client_info = {
            "headers": json.dumps(websocket.headers.items()),
            "host": websocket.client.host,
            "port": websocket.client.port
        }
        logger.info(f"wds.ws.disconnected client_info:{client_info}")
        await wds_service.disconnect(websocket)

    except UnauthorizedException:
        # Socket was already closed above; just record the failed auth.
        client_info = {
            "headers": json.dumps(websocket.headers.items()),
            "host": websocket.client.host,
            "port": websocket.client.port
        }
        logger.info(f"wds.ws.unauthorized client_info:{client_info}")

    except Exception as e:
        logger.exception(f"wds.ws.unhandled_exception {repr(e)}", exc_info=True)

    finally:
        # Clean up all subscriptions for this client
        for feed in active_feeds:
            await wds_service.unsubscribe(feed, websocket)
|
|
@@ -0,0 +1,36 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: kuhl-haus-mdp-servers
|
|
3
|
+
Version: 0.0.0
|
|
4
|
+
Summary: Container image build repository for market data processing servers
|
|
5
|
+
Author-email: Tom Pounders <git@oldschool.engineer>
|
|
6
|
+
Project-URL: Homepage, https://github.com/kuhl-haus/kuhl-haus-mdp-servers
|
|
7
|
+
Project-URL: Documentation, https://github.com/kuhl-haus/kuhl-haus-mdp-servers/wiki
|
|
8
|
+
Project-URL: Source, https://github.com/kuhl-haus/kuhl-haus-mdp-servers.git
|
|
9
|
+
Project-URL: Changelog, https://github.com/kuhl-haus/kuhl-haus-mdp-servers/commits
|
|
10
|
+
Project-URL: Tracker, https://github.com/kuhl-haus/kuhl-haus-mdp-servers/issues
|
|
11
|
+
Classifier: Development Status :: 4 - Beta
|
|
12
|
+
Classifier: Programming Language :: Python
|
|
13
|
+
Requires-Python: <3.13,>=3.9.21
|
|
14
|
+
Description-Content-Type: text/markdown
|
|
15
|
+
Requires-Dist: kuhl-haus-mdp
|
|
16
|
+
Provides-Extra: testing
|
|
17
|
+
Requires-Dist: setuptools; extra == "testing"
|
|
18
|
+
Requires-Dist: pdm-backend; extra == "testing"
|
|
19
|
+
Requires-Dist: pytest; extra == "testing"
|
|
20
|
+
Requires-Dist: pytest-cov; extra == "testing"
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
[](https://github.com/kuhl-haus/kuhl-haus-mdp-servers/blob/mainline/LICENSE.txt)
|
|
24
|
+
[](https://pypi.org/project/kuhl-haus-mdp-servers/)
|
|
25
|
+
[](https://github.com/kuhl-haus/kuhl-haus-mdp-servers/releases)
|
|
26
|
+
[](https://github.com/kuhl-haus/kuhl-haus-mdp-servers/actions/workflows/build-images.yml)
|
|
27
|
+
[](https://github.com/kuhl-haus/kuhl-haus-mdp-servers/actions/workflows/codeql.yml)
|
|
28
|
+
[](https://pepy.tech/project/kuhl-haus-mdp-servers)
|
|
29
|
+
[](https://github.com/kuhl-haus/kuhl-haus-mdp-servers/branches)
|
|
30
|
+
[](https://github.com/kuhl-haus/kuhl-haus-mdp-servers/issues)
|
|
31
|
+
[](https://github.com/kuhl-haus/kuhl-haus-mdp-servers/pulls)
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
# kuhl-haus-mdp-servers
|
|
36
|
+
Container image build repository for market data processing servers
|
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
README.md
|
|
2
|
+
pyproject.toml
|
|
3
|
+
setup.cfg
|
|
4
|
+
setup.py
|
|
5
|
+
src/kuhl_haus/servers/__init__.py
|
|
6
|
+
src/kuhl_haus/servers/mdl_server.py
|
|
7
|
+
src/kuhl_haus/servers/mdp_server.py
|
|
8
|
+
src/kuhl_haus/servers/wds_server.py
|
|
9
|
+
src/kuhl_haus_mdp_servers.egg-info/PKG-INFO
|
|
10
|
+
src/kuhl_haus_mdp_servers.egg-info/SOURCES.txt
|
|
11
|
+
src/kuhl_haus_mdp_servers.egg-info/dependency_links.txt
|
|
12
|
+
src/kuhl_haus_mdp_servers.egg-info/entry_points.txt
|
|
13
|
+
src/kuhl_haus_mdp_servers.egg-info/requires.txt
|
|
14
|
+
src/kuhl_haus_mdp_servers.egg-info/top_level.txt
|
|
15
|
+
src/kuhl_haus_mdp_servers.egg-info/zip-safe
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
kuhl_haus
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
|