mcli_framework-7.0.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of mcli-framework might be problematic.
- mcli/app/chat_cmd.py +42 -0
- mcli/app/commands_cmd.py +226 -0
- mcli/app/completion_cmd.py +216 -0
- mcli/app/completion_helpers.py +288 -0
- mcli/app/cron_test_cmd.py +697 -0
- mcli/app/logs_cmd.py +419 -0
- mcli/app/main.py +492 -0
- mcli/app/model/model.py +1060 -0
- mcli/app/model_cmd.py +227 -0
- mcli/app/redis_cmd.py +269 -0
- mcli/app/video/video.py +1114 -0
- mcli/app/visual_cmd.py +303 -0
- mcli/chat/chat.py +2409 -0
- mcli/chat/command_rag.py +514 -0
- mcli/chat/enhanced_chat.py +652 -0
- mcli/chat/system_controller.py +1010 -0
- mcli/chat/system_integration.py +1016 -0
- mcli/cli.py +25 -0
- mcli/config.toml +20 -0
- mcli/lib/api/api.py +586 -0
- mcli/lib/api/daemon_client.py +203 -0
- mcli/lib/api/daemon_client_local.py +44 -0
- mcli/lib/api/daemon_decorator.py +217 -0
- mcli/lib/api/mcli_decorators.py +1032 -0
- mcli/lib/auth/auth.py +85 -0
- mcli/lib/auth/aws_manager.py +85 -0
- mcli/lib/auth/azure_manager.py +91 -0
- mcli/lib/auth/credential_manager.py +192 -0
- mcli/lib/auth/gcp_manager.py +93 -0
- mcli/lib/auth/key_manager.py +117 -0
- mcli/lib/auth/mcli_manager.py +93 -0
- mcli/lib/auth/token_manager.py +75 -0
- mcli/lib/auth/token_util.py +1011 -0
- mcli/lib/config/config.py +47 -0
- mcli/lib/discovery/__init__.py +1 -0
- mcli/lib/discovery/command_discovery.py +274 -0
- mcli/lib/erd/erd.py +1345 -0
- mcli/lib/erd/generate_graph.py +453 -0
- mcli/lib/files/files.py +76 -0
- mcli/lib/fs/fs.py +109 -0
- mcli/lib/lib.py +29 -0
- mcli/lib/logger/logger.py +611 -0
- mcli/lib/performance/optimizer.py +409 -0
- mcli/lib/performance/rust_bridge.py +502 -0
- mcli/lib/performance/uvloop_config.py +154 -0
- mcli/lib/pickles/pickles.py +50 -0
- mcli/lib/search/cached_vectorizer.py +479 -0
- mcli/lib/services/data_pipeline.py +460 -0
- mcli/lib/services/lsh_client.py +441 -0
- mcli/lib/services/redis_service.py +387 -0
- mcli/lib/shell/shell.py +137 -0
- mcli/lib/toml/toml.py +33 -0
- mcli/lib/ui/styling.py +47 -0
- mcli/lib/ui/visual_effects.py +634 -0
- mcli/lib/watcher/watcher.py +185 -0
- mcli/ml/api/app.py +215 -0
- mcli/ml/api/middleware.py +224 -0
- mcli/ml/api/routers/admin_router.py +12 -0
- mcli/ml/api/routers/auth_router.py +244 -0
- mcli/ml/api/routers/backtest_router.py +12 -0
- mcli/ml/api/routers/data_router.py +12 -0
- mcli/ml/api/routers/model_router.py +302 -0
- mcli/ml/api/routers/monitoring_router.py +12 -0
- mcli/ml/api/routers/portfolio_router.py +12 -0
- mcli/ml/api/routers/prediction_router.py +267 -0
- mcli/ml/api/routers/trade_router.py +12 -0
- mcli/ml/api/routers/websocket_router.py +76 -0
- mcli/ml/api/schemas.py +64 -0
- mcli/ml/auth/auth_manager.py +425 -0
- mcli/ml/auth/models.py +154 -0
- mcli/ml/auth/permissions.py +302 -0
- mcli/ml/backtesting/backtest_engine.py +502 -0
- mcli/ml/backtesting/performance_metrics.py +393 -0
- mcli/ml/cache.py +400 -0
- mcli/ml/cli/main.py +398 -0
- mcli/ml/config/settings.py +394 -0
- mcli/ml/configs/dvc_config.py +230 -0
- mcli/ml/configs/mlflow_config.py +131 -0
- mcli/ml/configs/mlops_manager.py +293 -0
- mcli/ml/dashboard/app.py +532 -0
- mcli/ml/dashboard/app_integrated.py +738 -0
- mcli/ml/dashboard/app_supabase.py +560 -0
- mcli/ml/dashboard/app_training.py +615 -0
- mcli/ml/dashboard/cli.py +51 -0
- mcli/ml/data_ingestion/api_connectors.py +501 -0
- mcli/ml/data_ingestion/data_pipeline.py +567 -0
- mcli/ml/data_ingestion/stream_processor.py +512 -0
- mcli/ml/database/migrations/env.py +94 -0
- mcli/ml/database/models.py +667 -0
- mcli/ml/database/session.py +200 -0
- mcli/ml/experimentation/ab_testing.py +845 -0
- mcli/ml/features/ensemble_features.py +607 -0
- mcli/ml/features/political_features.py +676 -0
- mcli/ml/features/recommendation_engine.py +809 -0
- mcli/ml/features/stock_features.py +573 -0
- mcli/ml/features/test_feature_engineering.py +346 -0
- mcli/ml/logging.py +85 -0
- mcli/ml/mlops/data_versioning.py +518 -0
- mcli/ml/mlops/experiment_tracker.py +377 -0
- mcli/ml/mlops/model_serving.py +481 -0
- mcli/ml/mlops/pipeline_orchestrator.py +614 -0
- mcli/ml/models/base_models.py +324 -0
- mcli/ml/models/ensemble_models.py +675 -0
- mcli/ml/models/recommendation_models.py +474 -0
- mcli/ml/models/test_models.py +487 -0
- mcli/ml/monitoring/drift_detection.py +676 -0
- mcli/ml/monitoring/metrics.py +45 -0
- mcli/ml/optimization/portfolio_optimizer.py +834 -0
- mcli/ml/preprocessing/data_cleaners.py +451 -0
- mcli/ml/preprocessing/feature_extractors.py +491 -0
- mcli/ml/preprocessing/ml_pipeline.py +382 -0
- mcli/ml/preprocessing/politician_trading_preprocessor.py +569 -0
- mcli/ml/preprocessing/test_preprocessing.py +294 -0
- mcli/ml/scripts/populate_sample_data.py +200 -0
- mcli/ml/tasks.py +400 -0
- mcli/ml/tests/test_integration.py +429 -0
- mcli/ml/tests/test_training_dashboard.py +387 -0
- mcli/public/oi/oi.py +15 -0
- mcli/public/public.py +4 -0
- mcli/self/self_cmd.py +1246 -0
- mcli/workflow/daemon/api_daemon.py +800 -0
- mcli/workflow/daemon/async_command_database.py +681 -0
- mcli/workflow/daemon/async_process_manager.py +591 -0
- mcli/workflow/daemon/client.py +530 -0
- mcli/workflow/daemon/commands.py +1196 -0
- mcli/workflow/daemon/daemon.py +905 -0
- mcli/workflow/daemon/daemon_api.py +59 -0
- mcli/workflow/daemon/enhanced_daemon.py +571 -0
- mcli/workflow/daemon/process_cli.py +244 -0
- mcli/workflow/daemon/process_manager.py +439 -0
- mcli/workflow/daemon/test_daemon.py +275 -0
- mcli/workflow/dashboard/dashboard_cmd.py +113 -0
- mcli/workflow/docker/docker.py +0 -0
- mcli/workflow/file/file.py +100 -0
- mcli/workflow/gcloud/config.toml +21 -0
- mcli/workflow/gcloud/gcloud.py +58 -0
- mcli/workflow/git_commit/ai_service.py +328 -0
- mcli/workflow/git_commit/commands.py +430 -0
- mcli/workflow/lsh_integration.py +355 -0
- mcli/workflow/model_service/client.py +594 -0
- mcli/workflow/model_service/download_and_run_efficient_models.py +288 -0
- mcli/workflow/model_service/lightweight_embedder.py +397 -0
- mcli/workflow/model_service/lightweight_model_server.py +714 -0
- mcli/workflow/model_service/lightweight_test.py +241 -0
- mcli/workflow/model_service/model_service.py +1955 -0
- mcli/workflow/model_service/ollama_efficient_runner.py +425 -0
- mcli/workflow/model_service/pdf_processor.py +386 -0
- mcli/workflow/model_service/test_efficient_runner.py +234 -0
- mcli/workflow/model_service/test_example.py +315 -0
- mcli/workflow/model_service/test_integration.py +131 -0
- mcli/workflow/model_service/test_new_features.py +149 -0
- mcli/workflow/openai/openai.py +99 -0
- mcli/workflow/politician_trading/commands.py +1790 -0
- mcli/workflow/politician_trading/config.py +134 -0
- mcli/workflow/politician_trading/connectivity.py +490 -0
- mcli/workflow/politician_trading/data_sources.py +395 -0
- mcli/workflow/politician_trading/database.py +410 -0
- mcli/workflow/politician_trading/demo.py +248 -0
- mcli/workflow/politician_trading/models.py +165 -0
- mcli/workflow/politician_trading/monitoring.py +413 -0
- mcli/workflow/politician_trading/scrapers.py +966 -0
- mcli/workflow/politician_trading/scrapers_california.py +412 -0
- mcli/workflow/politician_trading/scrapers_eu.py +377 -0
- mcli/workflow/politician_trading/scrapers_uk.py +350 -0
- mcli/workflow/politician_trading/scrapers_us_states.py +438 -0
- mcli/workflow/politician_trading/supabase_functions.py +354 -0
- mcli/workflow/politician_trading/workflow.py +852 -0
- mcli/workflow/registry/registry.py +180 -0
- mcli/workflow/repo/repo.py +223 -0
- mcli/workflow/scheduler/commands.py +493 -0
- mcli/workflow/scheduler/cron_parser.py +238 -0
- mcli/workflow/scheduler/job.py +182 -0
- mcli/workflow/scheduler/monitor.py +139 -0
- mcli/workflow/scheduler/persistence.py +324 -0
- mcli/workflow/scheduler/scheduler.py +679 -0
- mcli/workflow/sync/sync_cmd.py +437 -0
- mcli/workflow/sync/test_cmd.py +314 -0
- mcli/workflow/videos/videos.py +242 -0
- mcli/workflow/wakatime/wakatime.py +11 -0
- mcli/workflow/workflow.py +37 -0
- mcli_framework-7.0.0.dist-info/METADATA +479 -0
- mcli_framework-7.0.0.dist-info/RECORD +186 -0
- mcli_framework-7.0.0.dist-info/WHEEL +5 -0
- mcli_framework-7.0.0.dist-info/entry_points.txt +7 -0
- mcli_framework-7.0.0.dist-info/licenses/LICENSE +21 -0
- mcli_framework-7.0.0.dist-info/top_level.txt +1 -0
mcli/lib/services/lsh_client.py
@@ -0,0 +1,441 @@
+"""
+LSH API Client for mcli
+Provides integration with LSH daemon API server for data pipeline processing
+"""
+
+import asyncio
+import json
+import logging
+import os
+import time
+from typing import Any, Dict, List, Optional, Callable
+from urllib.parse import urljoin
+
+import aiohttp
+import asyncio_mqtt
+from aiohttp_sse_client import client as sse_client
+
+from mcli.lib.logger.logger import get_logger
+
+logger = get_logger(__name__)
+
+
+class LSHClient:
+    """Client for connecting to LSH daemon API server"""
+
+    def __init__(
+        self,
+        base_url: Optional[str] = None,
+        api_key: Optional[str] = None,
+        timeout: int = 30,
+    ):
+        self.base_url = base_url or os.getenv("LSH_API_URL", "http://localhost:3030")
+        self.api_key = api_key or os.getenv("LSH_API_KEY")
+        self.timeout = aiohttp.ClientTimeout(total=timeout)
+        self.session: Optional[aiohttp.ClientSession] = None
+        self._event_handlers: Dict[str, List[Callable]] = {}
+
+        if not self.api_key:
+            logger.warning("LSH_API_KEY not set - authentication may fail")
+
+    async def __aenter__(self):
+        """Async context manager entry"""
+        await self.connect()
+        return self
+
+    async def __aexit__(self, exc_type, exc_val, exc_tb):
+        """Async context manager exit"""
+        await self.disconnect()
+
+    async def connect(self):
+        """Initialize aiohttp session"""
+        if not self.session:
+            connector = aiohttp.TCPConnector(limit=10)
+            self.session = aiohttp.ClientSession(
+                connector=connector, timeout=self.timeout
+            )
+            logger.info(f"Connected to LSH API at {self.base_url}")
+
+    async def disconnect(self):
+        """Close aiohttp session"""
+        if self.session:
+            await self.session.close()
+            self.session = None
+            logger.info("Disconnected from LSH API")
+
+    def _get_headers(self) -> Dict[str, str]:
+        """Get HTTP headers with authentication"""
+        headers = {"Content-Type": "application/json"}
+        if self.api_key:
+            headers["X-API-Key"] = self.api_key
+        return headers
+
+    async def _request(
+        self, method: str, endpoint: str, data: Optional[Dict] = None
+    ) -> Dict[str, Any]:
+        """Make HTTP request to LSH API"""
+        if not self.session:
+            await self.connect()
+
+        url = urljoin(self.base_url, endpoint)
+        headers = self._get_headers()
+
+        try:
+            async with self.session.request(
+                method, url, headers=headers, json=data
+            ) as response:
+                if response.status == 401:
+                    raise ValueError("LSH API authentication failed - check API key")
+
+                response.raise_for_status()
+                return await response.json()
+
+        except aiohttp.ClientError as e:
+            logger.error(f"LSH API request failed: {e}")
+            raise
+
+    # Job Management
+    async def get_status(self) -> Dict[str, Any]:
+        """Get LSH daemon status"""
+        return await self._request("GET", "/api/status")
+
+    async def list_jobs(self, filter_params: Optional[Dict] = None) -> List[Dict]:
+        """List all jobs from LSH daemon"""
+        endpoint = "/api/jobs"
+        if filter_params:
+            # Convert filter to query params
+            endpoint += "?" + "&".join(
+                f"{k}={v}" for k, v in filter_params.items()
+            )
+        return await self._request("GET", endpoint)
+
+    async def get_job(self, job_id: str) -> Dict[str, Any]:
+        """Get specific job details"""
+        return await self._request("GET", f"/api/jobs/{job_id}")
+
+    async def create_job(self, job_spec: Dict[str, Any]) -> Dict[str, Any]:
+        """Create a new job in LSH daemon"""
+        return await self._request("POST", "/api/jobs", job_spec)
+
+    async def trigger_job(self, job_id: str) -> Dict[str, Any]:
+        """Trigger job execution"""
+        return await self._request("POST", f"/api/jobs/{job_id}/trigger")
+
+    async def start_job(self, job_id: str) -> Dict[str, Any]:
+        """Start a job"""
+        return await self._request("POST", f"/api/jobs/{job_id}/start")
+
+    async def stop_job(self, job_id: str, signal: str = "SIGTERM") -> Dict[str, Any]:
+        """Stop a job"""
+        return await self._request("POST", f"/api/jobs/{job_id}/stop", {"signal": signal})
+
+    async def remove_job(self, job_id: str, force: bool = False) -> None:
+        """Remove a job"""
+        params = {"force": str(force).lower()}
+        endpoint = f"/api/jobs/{job_id}?" + "&".join(f"{k}={v}" for k, v in params.items())
+        await self._request("DELETE", endpoint)
+
+    async def bulk_create_jobs(self, jobs: List[Dict[str, Any]]) -> Dict[str, Any]:
+        """Create multiple jobs"""
+        return await self._request("POST", "/api/jobs/bulk", {"jobs": jobs})
+
+    # Data Export
+    async def export_jobs(self, format: str = "json") -> str:
+        """Export job data"""
+        endpoint = f"/api/export/jobs?format={format}"
+        return await self._request("GET", endpoint)
+
+    # Webhook Management
+    async def list_webhooks(self) -> Dict[str, Any]:
+        """List configured webhooks"""
+        return await self._request("GET", "/api/webhooks")
+
+    async def add_webhook(self, endpoint_url: str) -> Dict[str, Any]:
+        """Add webhook endpoint"""
+        return await self._request("POST", "/api/webhooks", {"endpoint": endpoint_url})
+
+    # Event Handling
+    def on(self, event_type: str, handler: Callable):
+        """Register event handler"""
+        if event_type not in self._event_handlers:
+            self._event_handlers[event_type] = []
+        self._event_handlers[event_type].append(handler)
+        logger.info(f"Registered handler for event: {event_type}")
+
+    async def _emit_event(self, event_type: str, data: Any):
+        """Emit event to registered handlers"""
+        if event_type in self._event_handlers:
+            for handler in self._event_handlers[event_type]:
+                try:
+                    if asyncio.iscoroutinefunction(handler):
+                        await handler(data)
+                    else:
+                        handler(data)
+                except Exception as e:
+                    logger.error(f"Error in event handler for {event_type}: {e}")
+
+    async def stream_events(self):
+        """Stream events from LSH API using Server-Sent Events"""
+        if not self.session:
+            await self.connect()
+
+        url = urljoin(self.base_url, "/api/events")
+        headers = self._get_headers()
+
+        logger.info("Starting LSH event stream...")
+
+        try:
+            async with sse_client.EventSource(
+                url, session=self.session, headers=headers
+            ) as event_source:
+                async for event in event_source:
+                    try:
+                        if event.data.strip():
+                            data = json.loads(event.data)
+                            event_type = data.get("type", "unknown")
+
+                            logger.debug(f"Received LSH event: {event_type}")
+
+                            # Emit to registered handlers
+                            await self._emit_event(event_type, data)
+                            await self._emit_event("*", data)  # Wildcard handler
+
+                    except json.JSONDecodeError as e:
+                        logger.warning(f"Failed to parse SSE data: {e}")
+                    except Exception as e:
+                        logger.error(f"Error processing SSE event: {e}")
+
+        except Exception as e:
+            logger.error(f"SSE connection error: {e}")
+            raise
+
+    # Supabase Integration
+    async def trigger_supabase_sync(
+        self, table: str, operation: str, data: Dict[str, Any]
+    ) -> Dict[str, Any]:
+        """Trigger Supabase data sync notification"""
+        payload = {"table": table, "operation": operation, "data": data}
+        return await self._request("POST", "/api/supabase/sync", payload)
+
+    # Health Check
+    async def health_check(self) -> bool:
+        """Check if LSH API is healthy"""
+        try:
+            if not self.session:
+                await self.connect()
+
+            url = urljoin(self.base_url, "/health")
+            async with self.session.get(url) as response:
+                return response.status == 200
+        except Exception as e:
+            logger.error(f"Health check failed: {e}")
+            return False
+
+
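For orientation, a minimal usage sketch of the LSHClient defined above (an editorial aside, not part of the released file). It assumes an LSH daemon reachable via LSH_API_URL; the job-spec fields ("name", "command") and the "id" key on responses are assumptions about the daemon's schema rather than anything this diff confirms:

import asyncio

from mcli.lib.services.lsh_client import LSHClient


async def main() -> None:
    # The async context manager opens the aiohttp session on entry and
    # closes it on exit (see __aenter__/__aexit__ above).
    async with LSHClient() as client:  # falls back to LSH_API_URL / LSH_API_KEY
        if not await client.health_check():
            raise SystemExit("LSH daemon not reachable")

        # Hypothetical job spec: the field names are assumed, not documented here.
        job = await client.create_job({"name": "demo-job", "command": "echo hello"})
        await client.trigger_job(job["id"])  # assumes responses carry an "id"
        print(await client.get_job(job["id"]))


asyncio.run(main())

The diff continues with the event-processing half of the module: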
+class LSHEventProcessor:
+    """Process events from LSH daemon for data pipeline integration"""
+
+    def __init__(self, lsh_client: LSHClient):
+        self.client = lsh_client
+        self.logger = get_logger(f"{__name__}.processor")
+        self._setup_event_handlers()
+
+    def _setup_event_handlers(self):
+        """Setup default event handlers"""
+        self.client.on("job:completed", self._handle_job_completed)
+        self.client.on("job:failed", self._handle_job_failed)
+        self.client.on("job:started", self._handle_job_started)
+        self.client.on("supabase:sync", self._handle_supabase_sync)
+        self.client.on("connected", self._handle_connected)
+
+    async def _handle_connected(self, data: Dict[str, Any]):
+        """Handle connection established event"""
+        self.logger.info("Connected to LSH event stream")
+
+    async def _handle_job_started(self, data: Dict[str, Any]):
+        """Handle job started event"""
+        job_data = data.get("data", {})
+        job_id = job_data.get("id", "unknown")
+        job_name = job_data.get("name", "unknown")
+
+        self.logger.info(f"LSH job started: {job_name} ({job_id})")
+
+        # Emit mcli-specific event
+        await self._emit_mcli_event("lsh.job.started", {
+            "job_id": job_id,
+            "job_name": job_name,
+            "timestamp": data.get("timestamp"),
+            "job_data": job_data
+        })
+
+    async def _handle_job_completed(self, data: Dict[str, Any]):
+        """Handle job completion event"""
+        job_data = data.get("data", {})
+        job_id = job_data.get("id", "unknown")
+        job_name = job_data.get("name", "unknown")
+
+        self.logger.info(f"LSH job completed: {job_name} ({job_id})")
+
+        # Process job output if available
+        stdout = job_data.get("stdout", "")
+        stderr = job_data.get("stderr", "")
+
+        # Check if this is a politician trading job
+        if "politician" in job_name.lower() or "trading" in job_name.lower():
+            await self._process_trading_data(job_data, stdout)
+
+        # Check if this is a Supabase sync job
+        if "supabase" in job_name.lower() or job_data.get("databaseSync"):
+            await self._process_supabase_job(job_data)
+
+        # Emit mcli-specific event
+        await self._emit_mcli_event("lsh.job.completed", {
+            "job_id": job_id,
+            "job_name": job_name,
+            "timestamp": data.get("timestamp"),
+            "job_data": job_data,
+            "stdout": stdout,
+            "stderr": stderr
+        })
+
+    async def _handle_job_failed(self, data: Dict[str, Any]):
+        """Handle job failure event"""
+        job_data = data.get("data", {})
+        job_id = job_data.get("id", "unknown")
+        job_name = job_data.get("name", "unknown")
+        error = job_data.get("error", "Unknown error")
+
+        self.logger.error(f"LSH job failed: {job_name} ({job_id}) - {error}")
+
+        # Emit mcli-specific event
+        await self._emit_mcli_event("lsh.job.failed", {
+            "job_id": job_id,
+            "job_name": job_name,
+            "timestamp": data.get("timestamp"),
+            "error": error,
+            "job_data": job_data
+        })
+
+    async def _handle_supabase_sync(self, data: Dict[str, Any]):
+        """Handle Supabase data sync event"""
+        table = data.get("table", "unknown")
+        operation = data.get("operation", "unknown")
+        sync_data = data.get("data", {})
+
+        self.logger.info(f"Supabase sync: {operation} on {table}")
+
+        # Process based on table type
+        if "politician" in table.lower() or "trading" in table.lower():
+            await self._process_politician_data(table, operation, sync_data)
+
+        # Emit mcli-specific event
+        await self._emit_mcli_event("lsh.supabase.sync", {
+            "table": table,
+            "operation": operation,
+            "data": sync_data,
+            "timestamp": data.get("timestamp")
+        })
+
+    async def _process_trading_data(self, job_data: Dict, stdout: str):
+        """Process politician trading data from job output"""
+        try:
+            # Parse trading data from stdout
+            if stdout.strip():
+                # Assuming JSON output format
+                trading_records = []
+                for line in stdout.strip().split('\n'):
+                    try:
+                        record = json.loads(line)
+                        trading_records.append(record)
+                    except json.JSONDecodeError:
+                        continue
+
+                if trading_records:
+                    self.logger.info(f"Processed {len(trading_records)} trading records")
+
+                    # Emit processed data event
+                    await self._emit_mcli_event("trading.data.processed", {
+                        "records": trading_records,
+                        "count": len(trading_records),
+                        "job_id": job_data.get("id"),
+                        "timestamp": time.time()
+                    })
+
+        except Exception as e:
+            self.logger.error(f"Error processing trading data: {e}")
+
+    async def _process_supabase_job(self, job_data: Dict):
+        """Process Supabase synchronization job"""
+        try:
+            # Check for database sync metadata
+            sync_info = job_data.get("databaseSync", {})
+
+            self.logger.info(f"Processing Supabase sync job: {job_data.get('name')}")
+
+            # Emit database sync event
+            await self._emit_mcli_event("database.sync.completed", {
+                "job_id": job_data.get("id"),
+                "sync_info": sync_info,
+                "timestamp": time.time()
+            })
+
+        except Exception as e:
+            self.logger.error(f"Error processing Supabase job: {e}")
+
+    async def _process_politician_data(self, table: str, operation: str, data: Dict):
+        """Process politician-related data changes"""
+        try:
+            self.logger.info(f"Processing politician data: {operation} on {table}")
+
+            # Apply data transformations based on operation
+            processed_data = await self._transform_politician_data(table, operation, data)
+
+            # Emit transformed data event
+            await self._emit_mcli_event("politician.data.updated", {
+                "table": table,
+                "operation": operation,
+                "original_data": data,
+                "processed_data": processed_data,
+                "timestamp": time.time()
+            })
+
+        except Exception as e:
+            self.logger.error(f"Error processing politician data: {e}")
+
+    async def _transform_politician_data(self, table: str, operation: str, data: Dict) -> Dict:
+        """Transform politician data based on business rules"""
+        # Apply transformations here
+        transformed = data.copy()
+
+        # Add computed fields
+        if "transaction_amount" in data:
+            amount = data["transaction_amount"]
+            if isinstance(amount, (int, float)):
+                transformed["amount_category"] = self._categorize_amount(amount)
+
+        # Add timestamps
+        transformed["processed_at"] = time.time()
+        transformed["mcli_version"] = "1.0.0"
+
+        return transformed
+
+    def _categorize_amount(self, amount: float) -> str:
+        """Categorize transaction amounts"""
+        if amount < 1000:
+            return "small"
+        elif amount < 50000:
+            return "medium"
+        elif amount < 500000:
+            return "large"
+        else:
+            return "very_large"
+
+    async def _emit_mcli_event(self, event_type: str, data: Dict[str, Any]):
+        """Emit mcli-specific events (can be extended to use message queue)"""
+        self.logger.debug(f"Emitting mcli event: {event_type}")
+        # For now, just log - can be extended to use Redis, RabbitMQ, etc.
+
+    async def start_processing(self):
+        """Start processing LSH events"""
+        self.logger.info("Starting LSH event processing...")
+        await self.client.stream_events()
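A companion sketch for the LSHEventProcessor (again editorial, not in the wheel). It relies only on behavior visible in the diff: the processor registers its job:* and supabase:sync handlers in __init__, stream_events() also emits every event to "*" handlers, and start_processing() blocks on the SSE stream:

import asyncio

from mcli.lib.services.lsh_client import LSHClient, LSHEventProcessor


async def main() -> None:
    async with LSHClient() as client:
        # Constructing the processor wires up the default job/supabase handlers.
        processor = LSHEventProcessor(client)

        # A wildcard handler runs alongside the defaults for every event type.
        client.on("*", lambda event: print("LSH event:", event.get("type")))

        await processor.start_processing()  # blocks, consuming /api/events


asyncio.run(main())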