mcli-framework 7.0.0__py3-none-any.whl
This diff shows the content of a publicly available package version released to one of the supported registries. It is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of mcli-framework might be problematic.
- mcli/app/chat_cmd.py +42 -0
- mcli/app/commands_cmd.py +226 -0
- mcli/app/completion_cmd.py +216 -0
- mcli/app/completion_helpers.py +288 -0
- mcli/app/cron_test_cmd.py +697 -0
- mcli/app/logs_cmd.py +419 -0
- mcli/app/main.py +492 -0
- mcli/app/model/model.py +1060 -0
- mcli/app/model_cmd.py +227 -0
- mcli/app/redis_cmd.py +269 -0
- mcli/app/video/video.py +1114 -0
- mcli/app/visual_cmd.py +303 -0
- mcli/chat/chat.py +2409 -0
- mcli/chat/command_rag.py +514 -0
- mcli/chat/enhanced_chat.py +652 -0
- mcli/chat/system_controller.py +1010 -0
- mcli/chat/system_integration.py +1016 -0
- mcli/cli.py +25 -0
- mcli/config.toml +20 -0
- mcli/lib/api/api.py +586 -0
- mcli/lib/api/daemon_client.py +203 -0
- mcli/lib/api/daemon_client_local.py +44 -0
- mcli/lib/api/daemon_decorator.py +217 -0
- mcli/lib/api/mcli_decorators.py +1032 -0
- mcli/lib/auth/auth.py +85 -0
- mcli/lib/auth/aws_manager.py +85 -0
- mcli/lib/auth/azure_manager.py +91 -0
- mcli/lib/auth/credential_manager.py +192 -0
- mcli/lib/auth/gcp_manager.py +93 -0
- mcli/lib/auth/key_manager.py +117 -0
- mcli/lib/auth/mcli_manager.py +93 -0
- mcli/lib/auth/token_manager.py +75 -0
- mcli/lib/auth/token_util.py +1011 -0
- mcli/lib/config/config.py +47 -0
- mcli/lib/discovery/__init__.py +1 -0
- mcli/lib/discovery/command_discovery.py +274 -0
- mcli/lib/erd/erd.py +1345 -0
- mcli/lib/erd/generate_graph.py +453 -0
- mcli/lib/files/files.py +76 -0
- mcli/lib/fs/fs.py +109 -0
- mcli/lib/lib.py +29 -0
- mcli/lib/logger/logger.py +611 -0
- mcli/lib/performance/optimizer.py +409 -0
- mcli/lib/performance/rust_bridge.py +502 -0
- mcli/lib/performance/uvloop_config.py +154 -0
- mcli/lib/pickles/pickles.py +50 -0
- mcli/lib/search/cached_vectorizer.py +479 -0
- mcli/lib/services/data_pipeline.py +460 -0
- mcli/lib/services/lsh_client.py +441 -0
- mcli/lib/services/redis_service.py +387 -0
- mcli/lib/shell/shell.py +137 -0
- mcli/lib/toml/toml.py +33 -0
- mcli/lib/ui/styling.py +47 -0
- mcli/lib/ui/visual_effects.py +634 -0
- mcli/lib/watcher/watcher.py +185 -0
- mcli/ml/api/app.py +215 -0
- mcli/ml/api/middleware.py +224 -0
- mcli/ml/api/routers/admin_router.py +12 -0
- mcli/ml/api/routers/auth_router.py +244 -0
- mcli/ml/api/routers/backtest_router.py +12 -0
- mcli/ml/api/routers/data_router.py +12 -0
- mcli/ml/api/routers/model_router.py +302 -0
- mcli/ml/api/routers/monitoring_router.py +12 -0
- mcli/ml/api/routers/portfolio_router.py +12 -0
- mcli/ml/api/routers/prediction_router.py +267 -0
- mcli/ml/api/routers/trade_router.py +12 -0
- mcli/ml/api/routers/websocket_router.py +76 -0
- mcli/ml/api/schemas.py +64 -0
- mcli/ml/auth/auth_manager.py +425 -0
- mcli/ml/auth/models.py +154 -0
- mcli/ml/auth/permissions.py +302 -0
- mcli/ml/backtesting/backtest_engine.py +502 -0
- mcli/ml/backtesting/performance_metrics.py +393 -0
- mcli/ml/cache.py +400 -0
- mcli/ml/cli/main.py +398 -0
- mcli/ml/config/settings.py +394 -0
- mcli/ml/configs/dvc_config.py +230 -0
- mcli/ml/configs/mlflow_config.py +131 -0
- mcli/ml/configs/mlops_manager.py +293 -0
- mcli/ml/dashboard/app.py +532 -0
- mcli/ml/dashboard/app_integrated.py +738 -0
- mcli/ml/dashboard/app_supabase.py +560 -0
- mcli/ml/dashboard/app_training.py +615 -0
- mcli/ml/dashboard/cli.py +51 -0
- mcli/ml/data_ingestion/api_connectors.py +501 -0
- mcli/ml/data_ingestion/data_pipeline.py +567 -0
- mcli/ml/data_ingestion/stream_processor.py +512 -0
- mcli/ml/database/migrations/env.py +94 -0
- mcli/ml/database/models.py +667 -0
- mcli/ml/database/session.py +200 -0
- mcli/ml/experimentation/ab_testing.py +845 -0
- mcli/ml/features/ensemble_features.py +607 -0
- mcli/ml/features/political_features.py +676 -0
- mcli/ml/features/recommendation_engine.py +809 -0
- mcli/ml/features/stock_features.py +573 -0
- mcli/ml/features/test_feature_engineering.py +346 -0
- mcli/ml/logging.py +85 -0
- mcli/ml/mlops/data_versioning.py +518 -0
- mcli/ml/mlops/experiment_tracker.py +377 -0
- mcli/ml/mlops/model_serving.py +481 -0
- mcli/ml/mlops/pipeline_orchestrator.py +614 -0
- mcli/ml/models/base_models.py +324 -0
- mcli/ml/models/ensemble_models.py +675 -0
- mcli/ml/models/recommendation_models.py +474 -0
- mcli/ml/models/test_models.py +487 -0
- mcli/ml/monitoring/drift_detection.py +676 -0
- mcli/ml/monitoring/metrics.py +45 -0
- mcli/ml/optimization/portfolio_optimizer.py +834 -0
- mcli/ml/preprocessing/data_cleaners.py +451 -0
- mcli/ml/preprocessing/feature_extractors.py +491 -0
- mcli/ml/preprocessing/ml_pipeline.py +382 -0
- mcli/ml/preprocessing/politician_trading_preprocessor.py +569 -0
- mcli/ml/preprocessing/test_preprocessing.py +294 -0
- mcli/ml/scripts/populate_sample_data.py +200 -0
- mcli/ml/tasks.py +400 -0
- mcli/ml/tests/test_integration.py +429 -0
- mcli/ml/tests/test_training_dashboard.py +387 -0
- mcli/public/oi/oi.py +15 -0
- mcli/public/public.py +4 -0
- mcli/self/self_cmd.py +1246 -0
- mcli/workflow/daemon/api_daemon.py +800 -0
- mcli/workflow/daemon/async_command_database.py +681 -0
- mcli/workflow/daemon/async_process_manager.py +591 -0
- mcli/workflow/daemon/client.py +530 -0
- mcli/workflow/daemon/commands.py +1196 -0
- mcli/workflow/daemon/daemon.py +905 -0
- mcli/workflow/daemon/daemon_api.py +59 -0
- mcli/workflow/daemon/enhanced_daemon.py +571 -0
- mcli/workflow/daemon/process_cli.py +244 -0
- mcli/workflow/daemon/process_manager.py +439 -0
- mcli/workflow/daemon/test_daemon.py +275 -0
- mcli/workflow/dashboard/dashboard_cmd.py +113 -0
- mcli/workflow/docker/docker.py +0 -0
- mcli/workflow/file/file.py +100 -0
- mcli/workflow/gcloud/config.toml +21 -0
- mcli/workflow/gcloud/gcloud.py +58 -0
- mcli/workflow/git_commit/ai_service.py +328 -0
- mcli/workflow/git_commit/commands.py +430 -0
- mcli/workflow/lsh_integration.py +355 -0
- mcli/workflow/model_service/client.py +594 -0
- mcli/workflow/model_service/download_and_run_efficient_models.py +288 -0
- mcli/workflow/model_service/lightweight_embedder.py +397 -0
- mcli/workflow/model_service/lightweight_model_server.py +714 -0
- mcli/workflow/model_service/lightweight_test.py +241 -0
- mcli/workflow/model_service/model_service.py +1955 -0
- mcli/workflow/model_service/ollama_efficient_runner.py +425 -0
- mcli/workflow/model_service/pdf_processor.py +386 -0
- mcli/workflow/model_service/test_efficient_runner.py +234 -0
- mcli/workflow/model_service/test_example.py +315 -0
- mcli/workflow/model_service/test_integration.py +131 -0
- mcli/workflow/model_service/test_new_features.py +149 -0
- mcli/workflow/openai/openai.py +99 -0
- mcli/workflow/politician_trading/commands.py +1790 -0
- mcli/workflow/politician_trading/config.py +134 -0
- mcli/workflow/politician_trading/connectivity.py +490 -0
- mcli/workflow/politician_trading/data_sources.py +395 -0
- mcli/workflow/politician_trading/database.py +410 -0
- mcli/workflow/politician_trading/demo.py +248 -0
- mcli/workflow/politician_trading/models.py +165 -0
- mcli/workflow/politician_trading/monitoring.py +413 -0
- mcli/workflow/politician_trading/scrapers.py +966 -0
- mcli/workflow/politician_trading/scrapers_california.py +412 -0
- mcli/workflow/politician_trading/scrapers_eu.py +377 -0
- mcli/workflow/politician_trading/scrapers_uk.py +350 -0
- mcli/workflow/politician_trading/scrapers_us_states.py +438 -0
- mcli/workflow/politician_trading/supabase_functions.py +354 -0
- mcli/workflow/politician_trading/workflow.py +852 -0
- mcli/workflow/registry/registry.py +180 -0
- mcli/workflow/repo/repo.py +223 -0
- mcli/workflow/scheduler/commands.py +493 -0
- mcli/workflow/scheduler/cron_parser.py +238 -0
- mcli/workflow/scheduler/job.py +182 -0
- mcli/workflow/scheduler/monitor.py +139 -0
- mcli/workflow/scheduler/persistence.py +324 -0
- mcli/workflow/scheduler/scheduler.py +679 -0
- mcli/workflow/sync/sync_cmd.py +437 -0
- mcli/workflow/sync/test_cmd.py +314 -0
- mcli/workflow/videos/videos.py +242 -0
- mcli/workflow/wakatime/wakatime.py +11 -0
- mcli/workflow/workflow.py +37 -0
- mcli_framework-7.0.0.dist-info/METADATA +479 -0
- mcli_framework-7.0.0.dist-info/RECORD +186 -0
- mcli_framework-7.0.0.dist-info/WHEEL +5 -0
- mcli_framework-7.0.0.dist-info/entry_points.txt +7 -0
- mcli_framework-7.0.0.dist-info/licenses/LICENSE +21 -0
- mcli_framework-7.0.0.dist-info/top_level.txt +1 -0
@@ -0,0 +1,501 @@
"""API connectors for real-time data ingestion"""

import requests
import asyncio
import aiohttp
import websockets
import json
import pandas as pd
from typing import Dict, Any, Optional, List, Callable, AsyncIterator
from datetime import datetime, timedelta
from dataclasses import dataclass
import logging
from abc import ABC, abstractmethod
import time
from urllib.parse import urljoin
import yfinance as yf

logger = logging.getLogger(__name__)


@dataclass
class APIConfig:
    """API configuration"""
    api_key: Optional[str] = None
    base_url: str = ""
    rate_limit: int = 100  # requests per minute
    timeout: int = 30
    retry_count: int = 3
    retry_delay: int = 1


class BaseAPIConnector(ABC):
    """Base class for API connectors"""

    def __init__(self, config: APIConfig):
        self.config = config
        self.session = None
        self.rate_limiter = RateLimiter(config.rate_limit)

    @abstractmethod
    async def fetch_data(self, **kwargs) -> Dict[str, Any]:
        """Fetch data from API"""
        pass

    async def _make_request(self, endpoint: str, params: Optional[Dict] = None) -> Dict[str, Any]:
        """Make API request with rate limiting and retry logic"""
        await self.rate_limiter.acquire()

        url = urljoin(self.config.base_url, endpoint)

        headers = {}
        if self.config.api_key:
            headers["Authorization"] = f"Bearer {self.config.api_key}"

        retry_count = 0
        while retry_count < self.config.retry_count:
            try:
                if not self.session:
                    self.session = aiohttp.ClientSession()

                async with self.session.get(
                    url,
                    params=params,
                    headers=headers,
                    timeout=self.config.timeout
                ) as response:
                    response.raise_for_status()
                    return await response.json()

            except aiohttp.ClientError as e:
                retry_count += 1
                if retry_count >= self.config.retry_count:
                    logger.error(f"API request failed after {retry_count} retries: {e}")
                    raise
                await asyncio.sleep(self.config.retry_delay * retry_count)

    async def close(self):
        """Close session"""
        if self.session:
            await self.session.close()


class RateLimiter:
    """Rate limiter for API requests"""

    def __init__(self, rate_limit: int):
        self.rate_limit = rate_limit
        self.tokens = rate_limit
        self.updated_at = time.time()
        self.lock = asyncio.Lock()

    async def acquire(self):
        """Acquire rate limit token"""
        async with self.lock:
            while self.tokens <= 0:
                now = time.time()
                elapsed = now - self.updated_at

                if elapsed >= 60:  # Reset every minute
                    self.tokens = self.rate_limit
                    self.updated_at = now
                else:
                    await asyncio.sleep(1)

            self.tokens -= 1


class CongressionalDataAPI(BaseAPIConnector):
    """Congressional trading data API connector"""

    def __init__(self, config: Optional[APIConfig] = None):
        if not config:
            config = APIConfig(
                base_url="https://api.capitoltrades.com/v1/",
                rate_limit=60
            )
        super().__init__(config)

    async def fetch_recent_trades(self, days: int = 30) -> List[Dict[str, Any]]:
        """Fetch recent congressional trades"""
        params = {
            "from_date": (datetime.now() - timedelta(days=days)).isoformat(),
            "to_date": datetime.now().isoformat(),
            "limit": 1000
        }

        try:
            data = await self._make_request("trades", params)
            return data.get("trades", [])
        except Exception as e:
            logger.error(f"Failed to fetch congressional trades: {e}")
            return self._generate_mock_trades()

    async def fetch_politician_info(self, politician_id: str) -> Dict[str, Any]:
        """Fetch politician information"""
        try:
            return await self._make_request(f"politicians/{politician_id}")
        except Exception as e:
            logger.error(f"Failed to fetch politician info: {e}")
            return self._generate_mock_politician_info(politician_id)

    def _generate_mock_trades(self) -> List[Dict[str, Any]]:
        """Generate mock trades for testing"""
        import random
        trades = []
        politicians = ["Nancy Pelosi", "Mitch McConnell", "Chuck Schumer", "Kevin McCarthy"]
        tickers = ["AAPL", "MSFT", "GOOGL", "AMZN", "TSLA", "META", "NVDA"]

        for _ in range(50):
            trades.append({
                "politician": random.choice(politicians),
                "ticker": random.choice(tickers),
                "transaction_type": random.choice(["buy", "sell"]),
                "amount": random.randint(1000, 1000000),
                "transaction_date": (datetime.now() - timedelta(days=random.randint(1, 30))).isoformat(),
                "disclosure_date": datetime.now().isoformat()
            })

        return trades

    def _generate_mock_politician_info(self, politician_id: str) -> Dict[str, Any]:
        """Generate mock politician info"""
        return {
            "id": politician_id,
            "name": "Mock Politician",
            "party": "Independent",
            "state": "CA",
            "chamber": "House",
            "committees": ["Finance", "Technology"]
        }


class StockMarketAPI(BaseAPIConnector):
    """Base class for stock market APIs"""

    async def fetch_quote(self, symbol: str) -> Dict[str, Any]:
        """Fetch current stock quote"""
        pass

    async def fetch_historical(self, symbol: str, period: str = "1mo") -> pd.DataFrame:
        """Fetch historical stock data"""
        pass

    async def stream_quotes(self, symbols: List[str]) -> AsyncIterator[Dict[str, Any]]:
        """Stream real-time quotes"""
        pass


class AlphaVantageConnector(StockMarketAPI):
    """Alpha Vantage API connector"""

    def __init__(self, api_key: str):
        config = APIConfig(
            api_key=api_key,
            base_url="https://www.alphavantage.co/query",
            rate_limit=5  # Free tier: 5 requests per minute
        )
        super().__init__(config)

    async def fetch_quote(self, symbol: str) -> Dict[str, Any]:
        """Fetch current quote from Alpha Vantage"""
        params = {
            "function": "GLOBAL_QUOTE",
            "symbol": symbol,
            "apikey": self.config.api_key
        }

        data = await self._make_request("", params)
        return self._parse_quote(data.get("Global Quote", {}))

    async def fetch_historical(self, symbol: str, period: str = "1mo") -> pd.DataFrame:
        """Fetch historical data from Alpha Vantage"""
        params = {
            "function": "TIME_SERIES_DAILY",
            "symbol": symbol,
            "outputsize": "full" if period == "max" else "compact",
            "apikey": self.config.api_key
        }

        data = await self._make_request("", params)
        time_series = data.get("Time Series (Daily)", {})

        # Convert to DataFrame
        df = pd.DataFrame.from_dict(time_series, orient='index')
        df.index = pd.to_datetime(df.index)
        df.columns = ['open', 'high', 'low', 'close', 'volume']
        df = df.astype(float)

        return df.sort_index()

    def _parse_quote(self, quote_data: Dict[str, Any]) -> Dict[str, Any]:
        """Parse Alpha Vantage quote"""
        return {
            "symbol": quote_data.get("01. symbol", ""),
            "price": float(quote_data.get("05. price", 0)),
            "volume": int(quote_data.get("06. volume", 0)),
            "timestamp": quote_data.get("07. latest trading day", ""),
            "change": float(quote_data.get("09. change", 0)),
            "change_percent": quote_data.get("10. change percent", "0%")
        }


class YahooFinanceConnector(StockMarketAPI):
    """Yahoo Finance connector using yfinance"""

    def __init__(self):
        config = APIConfig(rate_limit=2000)  # Yahoo Finance is generous
        super().__init__(config)

    async def fetch_quote(self, symbol: str) -> Dict[str, Any]:
        """Fetch current quote from Yahoo Finance"""
        try:
            ticker = yf.Ticker(symbol)
            info = ticker.info

            return {
                "symbol": symbol,
                "price": info.get("currentPrice", info.get("regularMarketPrice", 0)),
                "volume": info.get("volume", 0),
                "market_cap": info.get("marketCap", 0),
                "pe_ratio": info.get("trailingPE", 0),
                "dividend_yield": info.get("dividendYield", 0)
            }
        except Exception as e:
            logger.error(f"Failed to fetch Yahoo Finance quote: {e}")
            return {}

    async def fetch_historical(self, symbol: str, period: str = "1mo") -> pd.DataFrame:
        """Fetch historical data from Yahoo Finance"""
        try:
            ticker = yf.Ticker(symbol)
            df = ticker.history(period=period)
            return df
        except Exception as e:
            logger.error(f"Failed to fetch Yahoo Finance historical data: {e}")
            return pd.DataFrame()


class PolygonIOConnector(StockMarketAPI):
    """Polygon.io API connector"""

    def __init__(self, api_key: str):
        config = APIConfig(
            api_key=api_key,
            base_url="https://api.polygon.io/",
            rate_limit=100
        )
        super().__init__(config)

    async def fetch_quote(self, symbol: str) -> Dict[str, Any]:
        """Fetch current quote from Polygon.io"""
        endpoint = f"v2/last/nbbo/{symbol}"
        params = {"apiKey": self.config.api_key}

        data = await self._make_request(endpoint, params)
        return self._parse_polygon_quote(data)

    async def fetch_aggregates(self, symbol: str,
                               from_date: str, to_date: str,
                               timespan: str = "day") -> pd.DataFrame:
        """Fetch aggregate bars from Polygon.io"""
        endpoint = f"v2/aggs/ticker/{symbol}/range/1/{timespan}/{from_date}/{to_date}"
        params = {
            "apiKey": self.config.api_key,
            "adjusted": "true",
            "sort": "asc"
        }

        data = await self._make_request(endpoint, params)
        results = data.get("results", [])

        if not results:
            return pd.DataFrame()

        df = pd.DataFrame(results)
        df['timestamp'] = pd.to_datetime(df['t'], unit='ms')
        df = df.rename(columns={
            'o': 'open',
            'h': 'high',
            'l': 'low',
            'c': 'close',
            'v': 'volume'
        })

        return df.set_index('timestamp')

    def _parse_polygon_quote(self, data: Dict[str, Any]) -> Dict[str, Any]:
        """Parse Polygon.io quote"""
        results = data.get("results", {})
        return {
            "symbol": results.get("T", ""),
            "price": results.get("P", 0),
            "size": results.get("S", 0),
            "timestamp": results.get("t", 0)
        }


class QuiverQuantConnector(BaseAPIConnector):
    """QuiverQuant API for congressional trading data"""

    def __init__(self, api_key: str):
        config = APIConfig(
            api_key=api_key,
            base_url="https://api.quiverquant.com/beta/",
            rate_limit=100
        )
        super().__init__(config)

    async def fetch_congress_trades(self) -> List[Dict[str, Any]]:
        """Fetch congressional trading data"""
        headers = {
            "Authorization": f"Bearer {self.config.api_key}",
            "Accept": "application/json"
        }

        try:
            if not self.session:
                self.session = aiohttp.ClientSession()

            async with self.session.get(
                f"{self.config.base_url}historical/congresstrading",
                headers=headers
            ) as response:
                response.raise_for_status()
                data = await response.json()
                return data
        except Exception as e:
            logger.error(f"Failed to fetch QuiverQuant data: {e}")
            return []

    async def fetch_lobbying(self, ticker: str) -> List[Dict[str, Any]]:
        """Fetch lobbying data for a ticker"""
        headers = {
            "Authorization": f"Bearer {self.config.api_key}",
            "Accept": "application/json"
        }

        try:
            if not self.session:
                self.session = aiohttp.ClientSession()

            async with self.session.get(
                f"{self.config.base_url}historical/lobbying/{ticker}",
                headers=headers
            ) as response:
                response.raise_for_status()
                data = await response.json()
                return data
        except Exception as e:
            logger.error(f"Failed to fetch lobbying data: {e}")
            return []


class WebSocketDataStream:
    """WebSocket stream for real-time data"""

    def __init__(self, url: str, api_key: Optional[str] = None):
        self.url = url
        self.api_key = api_key
        self.websocket = None
        self.handlers = []

    def add_handler(self, handler: Callable):
        """Add message handler"""
        self.handlers.append(handler)

    async def connect(self):
        """Connect to WebSocket"""
        headers = {}
        if self.api_key:
            headers["Authorization"] = f"Bearer {self.api_key}"

        self.websocket = await websockets.connect(self.url, extra_headers=headers)
        logger.info(f"Connected to WebSocket: {self.url}")

    async def subscribe(self, symbols: List[str]):
        """Subscribe to symbols"""
        if not self.websocket:
            await self.connect()

        message = {
            "action": "subscribe",
            "symbols": symbols
        }
        await self.websocket.send(json.dumps(message))

    async def stream(self):
        """Stream messages"""
        if not self.websocket:
            await self.connect()

        async for message in self.websocket:
            data = json.loads(message)

            # Call handlers
            for handler in self.handlers:
                try:
                    if asyncio.iscoroutinefunction(handler):
                        await handler(data)
                    else:
                        handler(data)
                except Exception as e:
                    logger.error(f"Handler error: {e}")

    async def close(self):
        """Close WebSocket connection"""
        if self.websocket:
            await self.websocket.close()


class DataAggregator:
    """Aggregate data from multiple sources"""

    def __init__(self):
        self.sources = {}
        self.cache = {}
        self.cache_ttl = 300  # 5 minutes

    def add_source(self, name: str, connector: BaseAPIConnector):
        """Add data source"""
        self.sources[name] = connector
        logger.info(f"Added data source: {name}")

    async def fetch_all(self, symbol: str) -> Dict[str, Any]:
        """Fetch data from all sources"""
        results = {}

        # Check cache
        cache_key = f"{symbol}_{int(time.time() // self.cache_ttl)}"
        if cache_key in self.cache:
            return self.cache[cache_key]

        # Fetch from all sources concurrently
        tasks = []
        for name, connector in self.sources.items():
            if hasattr(connector, 'fetch_quote'):
                tasks.append(self._fetch_with_name(name, connector.fetch_quote(symbol)))

        responses = await asyncio.gather(*tasks, return_exceptions=True)

        for name, data in responses:
            if not isinstance(data, Exception):
                results[name] = data
            else:
                logger.error(f"Error fetching from {name}: {data}")

        # Cache results
        self.cache[cache_key] = results

        # Clean old cache entries
        if len(self.cache) > 100:
            oldest_keys = sorted(self.cache.keys())[:50]
            for key in oldest_keys:
                del self.cache[key]

        return results

    async def _fetch_with_name(self, name: str, coro):
        """Helper to fetch with source name"""
        result = await coro
        return name, result