kuhl_haus_mdp-0.0.1-py3-none-any.whl
This diff shows the content of a publicly available package version as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their public registries.
- kuhl_haus/mdp/__init__.py +10 -0
- kuhl_haus/mdp/analyzers/__init__.py +0 -0
- kuhl_haus/mdp/analyzers/analyzer.py +15 -0
- kuhl_haus/mdp/analyzers/massive_data_analyzer.py +102 -0
- kuhl_haus/mdp/analyzers/top_stocks.py +408 -0
- kuhl_haus/mdp/components/__init__.py +0 -0
- kuhl_haus/mdp/components/market_data_cache.py +29 -0
- kuhl_haus/mdp/components/market_data_scanner.py +236 -0
- kuhl_haus/mdp/components/widget_data_service.py +191 -0
- kuhl_haus/mdp/helpers/__init__.py +0 -0
- kuhl_haus/mdp/helpers/process_manager.py +228 -0
- kuhl_haus/mdp/helpers/queue_name_resolver.py +24 -0
- kuhl_haus/mdp/integ/__init__.py +0 -0
- kuhl_haus/mdp/integ/massive_data_listener.py +140 -0
- kuhl_haus/mdp/integ/massive_data_processor.py +236 -0
- kuhl_haus/mdp/integ/massive_data_queues.py +124 -0
- kuhl_haus/mdp/integ/utils.py +27 -0
- kuhl_haus/mdp/integ/web_socket_message_serde.py +143 -0
- kuhl_haus/mdp/models/__init__.py +0 -0
- kuhl_haus/mdp/models/market_data_analyzer_result.py +16 -0
- kuhl_haus/mdp/models/market_data_cache_keys.py +28 -0
- kuhl_haus/mdp/models/market_data_pubsub_keys.py +27 -0
- kuhl_haus/mdp/models/market_data_scanner_names.py +10 -0
- kuhl_haus/mdp/models/massive_data_queue.py +10 -0
- kuhl_haus_mdp-0.0.1.dist-info/METADATA +79 -0
- kuhl_haus_mdp-0.0.1.dist-info/RECORD +29 -0
- kuhl_haus_mdp-0.0.1.dist-info/WHEEL +4 -0
- kuhl_haus_mdp-0.0.1.dist-info/entry_points.txt +4 -0
- kuhl_haus_mdp-0.0.1.dist-info/licenses/LICENSE.txt +21 -0
kuhl_haus/mdp/integ/massive_data_listener.py
@@ -0,0 +1,140 @@
+import asyncio
+from datetime import datetime, timezone
+from zoneinfo import ZoneInfo
+from logging import Logger
+from typing import Awaitable, Callable, Optional, List, Union
+
+from massive import WebSocketClient
+from massive.websocket import Feed, Market, WebSocketMessage
+
+
+class MassiveDataListener:
+    connection_status: dict
+    ws_connection: Union[WebSocketClient, None]
+    ws_coroutine: Union[asyncio.Task, None]
+    feed: Feed
+    market: Market
+    subscriptions: List[str]
+    raw: bool
+    verbose: bool
+    max_reconnects: Optional[int]
+    secure: bool
+
+    def __init__(
+        self,
+        logger: Logger,
+        message_handler: Union[
+            Callable[[List[WebSocketMessage]], Awaitable],
+            Callable[[Union[str, bytes]], Awaitable],
+        ],
+        api_key: str,
+        feed: Feed,
+        market: Market,
+        subscriptions: List[str],
+        raw: bool = False,
+        verbose: bool = False,
+        max_reconnects: Optional[int] = 5,
+        secure: bool = True,
+        **kwargs,
+    ):
+        self.logger = logger
+        self.message_handler = message_handler
+        self.api_key = api_key
+        self.feed = feed
+        self.market = market
+        self.subscriptions = subscriptions
+        self.raw = raw
+        self.verbose = verbose
+        self.max_reconnects = max_reconnects
+        self.secure = secure
+        self.kwargs = kwargs
+        self.ws_connection = None  # Set by start()
+        self.ws_coroutine = None   # Set by start()
+        self.connection_status = {
+            "connected": False,
+            "feed": feed,
+            "market": market,
+            "subscriptions": subscriptions,
+        }
+
+    async def start(self):
+        """Start the WebSocket client."""
+        try:
+            self.logger.info("Instantiating WebSocket client...")
+            self.ws_connection = WebSocketClient(
+                api_key=self.api_key,
+                feed=self.feed,
+                market=self.market,
+                raw=self.raw,
+                verbose=self.verbose,
+                subscriptions=self.subscriptions,
+                max_reconnects=self.max_reconnects,
+                secure=self.secure,
+                **self.kwargs,
+            )
+            self.logger.info("Scheduling WebSocket client task...")
+            self.ws_coroutine = asyncio.create_task(self.async_task())
+        except Exception as e:
+            self.logger.error(f"Error starting WebSocket client: {e}")
+            await self.stop()
+
+    async def stop(self):
+        """Stop the WebSocket client."""
+        try:
+            self.logger.info("Shutting down WebSocket client...")
+            if self.ws_coroutine:
+                self.ws_coroutine.cancel()
+            await asyncio.sleep(1)
+            if self.ws_connection:
+                self.logger.info("Unsubscribing from all feeds...")
+                self.ws_connection.unsubscribe_all()
+                await asyncio.sleep(1)
+                self.logger.info("Closing connection...")
+                await self.ws_connection.close()
+            self.logger.info("Done.")
+        except Exception as e:
+            self.logger.error(f"Error stopping WebSocket client: {e}")
+        finally:
+            self.connection_status["connected"] = False
+            self.ws_connection = None
+            self.ws_coroutine = None
+
+    async def restart(self):
+        """Restart the WebSocket client."""
+        try:
+            self.logger.info("Stopping WebSocket client...")
+            await self.stop()
+            self.logger.info("done")
+            await asyncio.sleep(1)
+            self.logger.info("Starting WebSocket client...")
+            await self.start()
+            self.logger.info("done")
+        except Exception as e:
+            self.logger.error(f"Error restarting WebSocket client: {e}")
+
+    async def async_task(self):
+        """Main task that runs the WebSocket client."""
+        try:
+            self.logger.info("Connecting to market data provider...")
+            self.connection_status["connected"] = True
+            await asyncio.gather(
+                self.ws_connection.connect(self.message_handler),
+                return_exceptions=True,
+            )
+            self.connection_status["connected"] = False
+            self.logger.info("Disconnected from market data provider...")
+            pending_restart = True
+            while pending_restart:
+                # Get the current time in UTC, then convert to Eastern Time
+                utc_now = datetime.now(timezone.utc)
+                et_now = utc_now.astimezone(ZoneInfo("America/New_York"))
+
+                # Check if within trading hours: Mon-Fri, 04:00-19:59 ET
+                is_weekday = et_now.weekday() < 5
+                is_trading_hours = 4 <= et_now.hour < 20
+
+                if is_weekday and is_trading_hours:
+                    self.logger.info(f"Reconnecting at {et_now.strftime('%H:%M:%S %Z')}...")
+                    await self.start()
+                    pending_restart = False
+                else:
+                    self.logger.info(f"Outside market hours ({et_now.strftime('%H:%M:%S %Z')}), sleeping 5 min...")
+                    await asyncio.sleep(300)
+        except Exception as e:
+            self.logger.error(f"Fatal error in WebSocket client: {e}")
+            await self.stop()
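
For orientation, here is a minimal, hypothetical usage sketch of MassiveDataListener; it is not part of the package. The Feed/Market members, API key, and subscription string are assumptions modeled on typical massive client usage:

import asyncio
import logging
from typing import List

from massive.websocket import Feed, Market, WebSocketMessage

from kuhl_haus.mdp.integ.massive_data_listener import MassiveDataListener


async def print_messages(msgs: List[WebSocketMessage]) -> None:
    # Trivial handler: just print each message.
    for msg in msgs:
        print(msg)


async def main() -> None:
    listener = MassiveDataListener(
        logger=logging.getLogger("listener"),
        message_handler=print_messages,
        api_key="YOUR_API_KEY",    # placeholder
        feed=Feed.RealTime,        # assumed enum member
        market=Market.Stocks,      # assumed enum member
        subscriptions=["T.AAPL"],  # assumed subscription format (trades for AAPL)
    )
    await listener.start()         # schedules async_task in the background
    await asyncio.sleep(60)        # let it run for a minute
    await listener.stop()


asyncio.run(main())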

kuhl_haus/mdp/integ/massive_data_processor.py
@@ -0,0 +1,236 @@
+import asyncio
+import json
+import logging
+
+import aio_pika
+import redis.asyncio as aioredis
+from aio_pika.abc import AbstractIncomingMessage
+
+from kuhl_haus.mdp.analyzers.massive_data_analyzer import MassiveDataAnalyzer
+from kuhl_haus.mdp.models.market_data_analyzer_result import MarketDataAnalyzerResult
+from kuhl_haus.mdp.integ.web_socket_message_serde import WebSocketMessageSerde
+
+
+class MassiveDataProcessor:
+    queue_name: str
+    mdq_connected: bool
+    mdc_connected: bool
+    processed: int
+    duplicated: int
+    decoding_error: int
+    dropped: int
+    error: int
+
+    def __init__(
+        self,
+        rabbitmq_url: str,
+        queue_name: str,
+        redis_url: str,
+        prefetch_count: int = 100,  # Higher for async throughput
+        max_concurrent_tasks: int = 500,  # Concurrent processing limit
+    ):
+        self.rabbitmq_url = rabbitmq_url
+        self.queue_name = queue_name
+        self.redis_url = redis_url
+        self.prefetch_count = prefetch_count
+        self.max_concurrent_tasks = max_concurrent_tasks
+
+        # Connection objects
+        self.rmq_connection = None
+        self.rmq_channel = None
+        self.redis_client = None
+
+        # Analyzer
+        self.analyzer = MassiveDataAnalyzer()
+
+        # Concurrency control
+        self.semaphore = asyncio.Semaphore(max_concurrent_tasks)
+        self.processing_tasks = set()
+
+        # State
+        self.running = False
+        self.logger = logging.getLogger(__name__)
+
+        # Metrics
+        self.processed = 0
+        self.duplicated = 0
+        self.error = 0
+        self.decoding_error = 0
+        self.dropped = 0
+        self.mdq_connected = False
+        self.mdc_connected = False
+
+    async def connect(self, force: bool = False):
+        """Establish async connections to RabbitMQ and Redis."""
+        if not self.mdq_connected or force:
+            # RabbitMQ connection
+            try:
+                self.rmq_connection = await aio_pika.connect_robust(
+                    self.rabbitmq_url,
+                    heartbeat=60,
+                    timeout=30,  # Connection timeout
+                )
+                self.rmq_channel = await self.rmq_connection.channel()
+                await self.rmq_channel.set_qos(prefetch_count=self.prefetch_count)
+                await self.rmq_channel.get_queue(self.queue_name, ensure=False)
+                self.mdq_connected = True
+                self.logger.info(f"Connected to RabbitMQ queue: {self.queue_name}")
+            except Exception as e:
+                self.logger.error(f"Failed to connect to RabbitMQ: {e}")
+                raise
+
+        if not self.mdc_connected or force:
+            # Redis connection pool
+            try:
+                self.redis_client = aioredis.from_url(
+                    self.redis_url,
+                    encoding="utf-8",
+                    decode_responses=True,
+                    max_connections=1000,
+                    socket_connect_timeout=10,  # Connect timeout
+                )
+
+                # Test the Redis connection
+                await self.redis_client.ping()
+                self.mdc_connected = True
+                self.logger.debug(f"Connected to Redis: {self.redis_url}")
+            except Exception as e:
+                self.logger.error(f"Failed to connect to Redis: {e}")
+                # Clean up the RabbitMQ connection on Redis failure
+                await self.rmq_channel.close()
+                await self.rmq_connection.close()
+                self.mdq_connected = False
+                raise
+
+    async def _process_message(self, message: AbstractIncomingMessage):
+        """Process a single message with concurrency control."""
+        async with self.semaphore:
+            try:
+                async with message.process():
+                    # Parse the message; the queued body is serde JSON, so
+                    # to_dict's fallback path returns it as a plain dict.
+                    web_socket_message = json.loads(message.body.decode())
+                    data = WebSocketMessageSerde.to_dict(web_socket_message)
+
+                    # Delegate to the analyzer (async)
+                    analyzer_results = await self.analyzer.analyze_data(data)
+                    if analyzer_results:
+                        self.processed += 1
+                        for analyzer_result in analyzer_results:
+                            # Cache in Redis
+                            await self._cache_result(analyzer_result)
+
+                        self.logger.debug(f"Processed message {message.delivery_tag}")
+                    else:
+                        # Empty result - drop the message
+                        self.dropped += 1
+                        self.logger.debug(
+                            f"Analyzer returned empty for {message.delivery_tag}"
+                        )
+            except aio_pika.exceptions.MessageProcessError as e:
+                self.logger.error(f"Message processing error: {e}")
+                self.duplicated += 1
+            except json.JSONDecodeError as e:
+                self.logger.error(f"JSON decode error: {e}")
+                self.decoding_error += 1
+            except Exception as e:
+                self.logger.error(f"Processing error: {e}", exc_info=True)
+                self.error += 1
+
+    async def _callback(self, message: AbstractIncomingMessage):
+        """
+        Message callback - spawns a processing task.
+
+        Note: tasks are tracked for graceful shutdown.
+        """
+        task = asyncio.create_task(self._process_message(message))
+        self.processing_tasks.add(task)
+        task.add_done_callback(self.processing_tasks.discard)
+
+    async def _cache_result(self, analyzer_result: MarketDataAnalyzerResult):
+        """
+        Asynchronously cache the result in Redis, with a pub/sub notification.
+
+        Args:
+            analyzer_result: Analyzer output; its `data` payload is JSON-serialized,
+                cached under `cache_key` (honoring `cache_ttl`) and published to
+                `publish_key` when those keys are set.
+        """
+        result_json = json.dumps(analyzer_result.data)
+
+        # Build a non-transactional pipeline; the queued commands are not
+        # awaited individually, only execute() is.
+        pipe = self.redis_client.pipeline(transaction=False)
+        if analyzer_result.cache_key:
+            if analyzer_result.cache_ttl > 0:
+                pipe.setex(analyzer_result.cache_key, analyzer_result.cache_ttl, result_json)
+            else:
+                pipe.set(analyzer_result.cache_key, result_json)
+        if analyzer_result.publish_key:
+            pipe.publish(analyzer_result.publish_key, result_json)
+
+        await pipe.execute()
+
+        self.logger.debug(f"Cached result for {analyzer_result.cache_key}")
+
+    async def start(self):
+        """Start async message consumption."""
+        retry_count = 0
+        while not self.mdc_connected or not self.mdq_connected:
+            try:
+                await self.connect()
+            except Exception as e:
+                if retry_count < 5:
+                    retry_count += 1
+                    self.logger.error(f"Connection error: {e}, sleeping for {2 * retry_count}s")
+                    await asyncio.sleep(2 * retry_count)
+                else:
+                    self.logger.error("Failed to connect to RabbitMQ or Redis")
+                    raise
+        if self.mdc_connected and self.mdq_connected:
+            self.running = True
+        else:
+            self.logger.error("Failed to connect to RabbitMQ or Redis")
+            raise RuntimeError("Failed to connect to RabbitMQ or Redis")
+
+        # Get the queue
+        queue = await self.rmq_channel.get_queue(self.queue_name)
+
+        self.logger.info("Starting async message consumption")
+
+        # Start consuming with the callback
+        await queue.consume(self._callback, no_ack=False)
+
+        # Run until a shutdown signal
+        try:
+            while self.running:
+                await asyncio.sleep(1)
+        except asyncio.CancelledError:
+            self.logger.info("Consumption cancelled")
+        finally:
+            await self.stop()
+
+    async def stop(self):
+        """Graceful async shutdown."""
+        self.logger.info("Stopping processor - waiting for pending tasks")
+        self.running = False
+
+        # Wait for all processing tasks to complete
+        if self.processing_tasks:
+            self.logger.info(f"Waiting for {len(self.processing_tasks)} tasks")
+            await asyncio.gather(*self.processing_tasks, return_exceptions=True)
+
+        # Close connections
+        if self.rmq_channel:
+            await self.rmq_channel.close()
+
+        if self.rmq_connection:
+            await self.rmq_connection.close()
+
+        if self.redis_client:
+            await self.redis_client.close()
+
+        self.logger.info(
+            f"Processor stopped - Processed: {self.processed}, "
+            f"Errors: {self.error}"
+        )
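
A minimal sketch of running the processor, assuming local RabbitMQ and Redis instances; the URLs and the queue name are placeholders (the real names come from MassiveDataQueue):

import asyncio

from kuhl_haus.mdp.integ.massive_data_processor import MassiveDataProcessor


async def main() -> None:
    processor = MassiveDataProcessor(
        rabbitmq_url="amqp://guest:guest@localhost/",  # placeholder URL
        queue_name="trades",                           # assumed queue name; see MassiveDataQueue
        redis_url="redis://localhost:6379/0",          # placeholder URL
    )
    # start() connects (with retries), consumes until `running` is cleared,
    # and calls stop() on the way out.
    await processor.start()


asyncio.run(main())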

kuhl_haus/mdp/integ/massive_data_queues.py
@@ -0,0 +1,124 @@
+from datetime import datetime
+from typing import List, Union
+
+from aio_pika import Connection, Channel, connect_robust, Message
+from aio_pika import DeliveryMode
+from aio_pika.abc import AbstractConnection, AbstractChannel
+from massive.websocket.models import WebSocketMessage
+
+from kuhl_haus.mdp.models.massive_data_queue import MassiveDataQueue
+from kuhl_haus.mdp.helpers.queue_name_resolver import QueueNameResolver
+from kuhl_haus.mdp.integ.web_socket_message_serde import WebSocketMessageSerde
+
+
+class MassiveDataQueues:
+    rabbitmq_url: str
+    queues: List[str]
+    message_ttl: int
+    connection: Union[Connection, AbstractConnection]
+    channel: Union[Channel, AbstractChannel]
+    connection_status: dict
+
+    def __init__(self, logger, rabbitmq_url, message_ttl: int):
+        self.logger = logger
+        self.rabbitmq_url = rabbitmq_url
+        self.queues = [
+            MassiveDataQueue.TRADES.value,
+            MassiveDataQueue.AGGREGATE.value,
+            MassiveDataQueue.QUOTES.value,
+            MassiveDataQueue.HALTS.value,
+            MassiveDataQueue.NEWS.value,
+            MassiveDataQueue.UNKNOWN.value,
+        ]
+        self.message_ttl = message_ttl
+        self.connection_status = {
+            "connected": False,
+            "last_message_time": None,
+            "messages_received": 0,
+            MassiveDataQueue.TRADES.value: 0,
+            MassiveDataQueue.AGGREGATE.value: 0,
+            MassiveDataQueue.QUOTES.value: 0,
+            MassiveDataQueue.HALTS.value: 0,
+            MassiveDataQueue.NEWS.value: 0,
+            MassiveDataQueue.UNKNOWN.value: 0,
+            "unsupported_messages": 0,
+            "reconnect_attempts": 0,
+        }
+
+    async def connect(self):
+        self.connection = await connect_robust(self.rabbitmq_url)
+        self.channel = await self.connection.channel()
+
+        try:
+            for q in self.queues:
+                _ = await self.channel.declare_queue(q, passive=True)  # Don't create, just check
+
+            self.connection_status["connected"] = self.connection is not None and self.channel is not None
+        except Exception as e:
+            self.logger.error(f"Fatal error while verifying queues: {e}")
+            raise
+
+    async def handle_messages(self, msgs: List[WebSocketMessage]):
+        if not self.channel:
+            self.logger.error("RabbitMQ channel not initialized")
+            raise RuntimeError("RabbitMQ channel not initialized")
+        if not self.connection:
+            self.logger.error("RabbitMQ connection not initialized")
+            raise RuntimeError("RabbitMQ connection not initialized")
+        try:
+            for message in msgs:
+                await self.fanout_to_queues(message)
+        except Exception as e:
+            self.logger.error(f"Fatal error while processing messages: {e}")
+            raise
+
+    async def shutdown(self):
+        self.connection_status["connected"] = False
+        self.logger.info("Closing RabbitMQ channel")
+        await self.channel.close()
+        self.logger.info("RabbitMQ channel closed")
+        self.logger.info("Closing RabbitMQ connection")
+        await self.connection.close()
+        self.logger.info("RabbitMQ connection closed")
+
+    async def setup_queues(self):
+        self.connection = await connect_robust(self.rabbitmq_url)
+        self.channel = await self.connection.channel()
+
+        # Declare queues with a per-message TTL
+        for queue in self.queues:
+            await self.channel.declare_queue(
+                queue,
+                durable=True,
+                arguments={"x-message-ttl": self.message_ttl},  # Messages are deleted after they expire
+            )
+
+            self.logger.info(f"{queue} queue created with {self.message_ttl}ms TTL")
+        self.connection_status["connected"] = self.connection is not None and self.channel is not None
+
+    async def fanout_to_queues(self, message: WebSocketMessage):
+        try:
+            self.logger.debug(f"Received message: {message}")
+            self.connection_status["messages_received"] += 1
+            self.connection_status["last_message_time"] = datetime.now().isoformat()
+
+            serialized_message = WebSocketMessageSerde.serialize(message)
+            self.logger.debug(f"Serialized message: {serialized_message}")
+
+            encoded_message = serialized_message.encode()
+            rabbit_message = Message(
+                body=encoded_message,
+                delivery_mode=DeliveryMode.PERSISTENT,  # Survive broker restart
+                content_type="application/json",
+                timestamp=datetime.now(),
+            )
+
+            # Publish to the event-specific queue
+            queue_name = QueueNameResolver.queue_name_for_web_socket_message(message)
+            self.logger.debug(f"Queue name: {queue_name}")
+
+            await self.channel.default_exchange.publish(rabbit_message, routing_key=queue_name)
+            self.connection_status[queue_name] += 1
+
+        except Exception as e:
+            self.logger.error(f"Error publishing to RabbitMQ: {e}")
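
A hypothetical wiring sketch (not from the package): declare the queues once, after which handle_messages can serve as the MassiveDataListener message handler so every WebSocket message is fanned out to its event-specific queue. The RabbitMQ URL is a placeholder:

import asyncio
import logging

from kuhl_haus.mdp.integ.massive_data_queues import MassiveDataQueues


async def main() -> None:
    queues = MassiveDataQueues(
        logger=logging.getLogger("queues"),
        rabbitmq_url="amqp://guest:guest@localhost/",  # placeholder URL
        message_ttl=60_000,  # x-message-ttl is in milliseconds, so 60s
    )
    await queues.setup_queues()  # declares the queues and opens a channel
    # queues.handle_messages is an awaitable taking List[WebSocketMessage],
    # so it can be passed to MassiveDataListener as its message_handler.
    await queues.shutdown()


asyncio.run(main())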

kuhl_haus/mdp/integ/utils.py
@@ -0,0 +1,27 @@
+import logging
+import os
+
+
+logging.basicConfig(
+    level=logging.INFO,
+    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
+)
+logger = logging.getLogger(__name__)
+
+
+def get_massive_api_key():
+    logger.info("Getting Massive API key...")
+    api_key = os.environ.get("MASSIVE_API_KEY")
+    if not api_key:
+        logger.info("MASSIVE_API_KEY environment variable not set; trying POLYGON_API_KEY...")
+        api_key = os.environ.get("POLYGON_API_KEY")
+    if not api_key:
+        logger.info("POLYGON_API_KEY environment variable not set; trying Massive API key file...")
+        api_key_path = '/app/polygon_api_key.txt'
+        try:
+            with open(api_key_path, 'r') as f:
+                api_key = f.read().strip()
+        except FileNotFoundError:
+            api_key = None
+    if not api_key:
+        logger.error("No Massive API key found")
+        raise ValueError("No Massive API key found")
+    logger.info("Done.")
+    return api_key
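
A minimal sketch of resolving the key, assuming one of the two environment variables is set (or the key file exists at /app/polygon_api_key.txt inside the container); the value below is a placeholder for a local run:

import os

from kuhl_haus.mdp.integ.utils import get_massive_api_key

os.environ.setdefault("MASSIVE_API_KEY", "demo-key")  # placeholder for a local run
api_key = get_massive_api_key()
print(f"Resolved an API key of length {len(api_key)}")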

kuhl_haus/mdp/integ/web_socket_message_serde.py
@@ -0,0 +1,143 @@
+import json
+from argparse import ArgumentTypeError
+from typing import Union
+
+from massive.websocket.models import (
+    WebSocketMessage,
+    EquityAgg,
+    EquityQuote,
+    EquityTrade,
+    LimitUpLimitDown,
+    EventType
+)
+
+
+class WebSocketMessageSerde:
+    @staticmethod
+    def serialize(message: WebSocketMessage) -> str:
+        if isinstance(message, EquityTrade):
+            return WebSocketMessageSerde.serialize_equity_trade(message)
+        elif isinstance(message, EquityAgg):
+            return WebSocketMessageSerde.serialize_equity_agg(message)
+        elif isinstance(message, EquityQuote):
+            return WebSocketMessageSerde.serialize_equity_quote(message)
+        elif isinstance(message, LimitUpLimitDown):
+            return WebSocketMessageSerde.serialize_limit_up_limit_down(message)
+        else:
+            # Fallback: assumes the message is already JSON-serializable (e.g. raw mode)
+            return json.dumps(message)
+
+    @staticmethod
+    def to_dict(message: WebSocketMessage) -> dict:
+        if isinstance(message, EquityTrade):
+            return WebSocketMessageSerde.decode_equity_trade(message)
+        elif isinstance(message, EquityAgg):
+            return WebSocketMessageSerde.decode_equity_agg(message)
+        elif isinstance(message, EquityQuote):
+            return WebSocketMessageSerde.decode_equity_quote(message)
+        elif isinstance(message, LimitUpLimitDown):
+            return WebSocketMessageSerde.decode_limit_up_limit_down(message)
+        else:
+            # Fallback: round-trip through JSON to produce a plain dict copy
+            return json.loads(json.dumps(message))
+
+    @staticmethod
+    def deserialize(serialized_message: str) -> Union[LimitUpLimitDown, EquityAgg, EquityTrade, EquityQuote]:
+        message: dict = json.loads(serialized_message)
+        event_type = message.get("event_type")
+        if event_type == EventType.LimitUpLimitDown.value:
+            # decode_limit_up_limit_down emits "high"/"low"; map them back to
+            # the model's high_price/low_price fields before constructing.
+            message["high_price"] = message.pop("high", None)
+            message["low_price"] = message.pop("low", None)
+            return LimitUpLimitDown(**message)
+        elif event_type == EventType.EquityAgg.value:
+            return EquityAgg(**message)
+        elif event_type == EventType.EquityTrade.value:
+            return EquityTrade(**message)
+        elif event_type == EventType.EquityQuote.value:
+            return EquityQuote(**message)
+        else:
+            raise ArgumentTypeError(f"Unsupported message type: {event_type}")
+
+    @staticmethod
+    def decode_limit_up_limit_down(message: LimitUpLimitDown) -> dict:
+        ret: dict = {
+            "event_type": message.event_type,
+            "symbol": message.symbol,
+            "high": message.high_price,
+            "low": message.low_price,
+            "indicators": message.indicators,
+            "tape": message.tape,
+            "timestamp": message.timestamp,
+            "sequence_number": message.sequence_number,
+        }
+        return ret
+
+    @staticmethod
+    def serialize_limit_up_limit_down(message: LimitUpLimitDown) -> str:
+        return json.dumps(WebSocketMessageSerde.decode_limit_up_limit_down(message))
+
+    @staticmethod
+    def decode_equity_agg(message: EquityAgg) -> dict:
+        ret: dict = {
+            "event_type": message.event_type,
+            "symbol": message.symbol,
+            "volume": message.volume,
+            "accumulated_volume": message.accumulated_volume,
+            "official_open_price": message.official_open_price,
+            "vwap": message.vwap,
+            "open": message.open,
+            "close": message.close,
+            "high": message.high,
+            "low": message.low,
+            "aggregate_vwap": message.aggregate_vwap,
+            "average_size": message.average_size,
+            "start_timestamp": message.start_timestamp,
+            "end_timestamp": message.end_timestamp,
+            "otc": message.otc,
+        }
+        return ret
+
+    @staticmethod
+    def serialize_equity_agg(message: EquityAgg) -> str:
+        return json.dumps(WebSocketMessageSerde.decode_equity_agg(message))
+
+    @staticmethod
+    def decode_equity_trade(message: EquityTrade) -> dict:
+        ret: dict = {
+            "event_type": message.event_type,
+            "symbol": message.symbol,
+            "exchange": message.exchange,
+            "id": message.id,
+            "tape": message.tape,
+            "price": message.price,
+            "size": message.size,
+            "conditions": message.conditions,
+            "timestamp": message.timestamp,
+            "sequence_number": message.sequence_number,
+            "trf_id": message.trf_id,
+            "trf_timestamp": message.trf_timestamp
+        }
+        return ret
+
+    @staticmethod
+    def serialize_equity_trade(message: EquityTrade) -> str:
+        return json.dumps(WebSocketMessageSerde.decode_equity_trade(message))
+
+    @staticmethod
+    def decode_equity_quote(message: EquityQuote) -> dict:
+        ret: dict = {
+            "event_type": message.event_type,
+            "symbol": message.symbol,
+            "bid_exchange_id": message.bid_exchange_id,
+            "bid_price": message.bid_price,
+            "bid_size": message.bid_size,
+            "ask_exchange_id": message.ask_exchange_id,
+            "ask_price": message.ask_price,
+            "ask_size": message.ask_size,
+            "condition": message.condition,
+            "indicators": message.indicators,
+            "timestamp": message.timestamp,
+            "sequence_number": message.sequence_number,
+            "tape": message.tape,
+        }
+        return ret
+
+    @staticmethod
+    def serialize_equity_quote(message: EquityQuote) -> str:
+        return json.dumps(WebSocketMessageSerde.decode_equity_quote(message))
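
A round-trip sketch for the serde: serialize an EquityTrade to JSON and deserialize it back. The field values are illustrative, and it is assumed that unspecified EquityTrade fields default to None and that "T" matches EventType.EquityTrade.value in the massive models:

from massive.websocket.models import EquityTrade

from kuhl_haus.mdp.integ.web_socket_message_serde import WebSocketMessageSerde

trade = EquityTrade(
    event_type="T",  # assumed to match EventType.EquityTrade.value
    symbol="AAPL",
    price=123.45,
    size=100,
    timestamp=1_700_000_000_000,
)
payload = WebSocketMessageSerde.serialize(trade)       # -> JSON string
restored = WebSocketMessageSerde.deserialize(payload)  # -> EquityTrade
assert isinstance(restored, EquityTrade)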
File without changes

kuhl_haus/mdp/models/market_data_analyzer_result.py
@@ -0,0 +1,16 @@
+from dataclasses import dataclass
+from typing import Any, Optional
+
+
+@dataclass()
+class MarketDataAnalyzerResult:
+    """
+    The object passed as data must be JSON serializable.
+
+    Refer to the following for more details:
+    https://docs.python.org/3.12/library/json.html#py-to-json-table
+    """
+    data: Any
+    cache_key: Optional[str] = None
+    cache_ttl: Optional[int] = 0
+    publish_key: Optional[str] = None
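
As consumed by MassiveDataProcessor._cache_result above, a result that is both cached (with a 30-second TTL) and published might be built like this; the key strings are placeholders, not the package's real key conventions:

from kuhl_haus.mdp.models.market_data_analyzer_result import MarketDataAnalyzerResult

result = MarketDataAnalyzerResult(
    data={"symbol": "AAPL", "last": 123.45},  # any JSON-serializable payload
    cache_key="mdp:last:AAPL",                # placeholder cache key
    cache_ttl=30,                             # seconds; 0 (the default) means no expiry
    publish_key="mdp:pubsub:last",            # placeholder pub/sub channel
)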