mcli-framework 7.2.0__py3-none-any.whl → 7.4.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of mcli-framework might be problematic.

Files changed (97)
  1. mcli/__init__.py +160 -0
  2. mcli/__main__.py +14 -0
  3. mcli/app/__init__.py +23 -0
  4. mcli/app/commands_cmd.py +741 -0
  5. mcli/app/model/__init__.py +0 -0
  6. mcli/app/video/__init__.py +5 -0
  7. mcli/chat/__init__.py +34 -0
  8. mcli/lib/__init__.py +0 -0
  9. mcli/lib/api/__init__.py +0 -0
  10. mcli/lib/auth/__init__.py +1 -0
  11. mcli/lib/config/__init__.py +1 -0
  12. mcli/lib/erd/__init__.py +25 -0
  13. mcli/lib/files/__init__.py +0 -0
  14. mcli/lib/fs/__init__.py +1 -0
  15. mcli/lib/logger/__init__.py +3 -0
  16. mcli/lib/performance/__init__.py +17 -0
  17. mcli/lib/pickles/__init__.py +1 -0
  18. mcli/lib/shell/__init__.py +0 -0
  19. mcli/lib/toml/__init__.py +1 -0
  20. mcli/lib/watcher/__init__.py +0 -0
  21. mcli/ml/__init__.py +16 -0
  22. mcli/ml/api/__init__.py +30 -0
  23. mcli/ml/api/routers/__init__.py +27 -0
  24. mcli/ml/api/schemas.py +2 -2
  25. mcli/ml/auth/__init__.py +45 -0
  26. mcli/ml/auth/models.py +2 -2
  27. mcli/ml/backtesting/__init__.py +39 -0
  28. mcli/ml/cli/__init__.py +5 -0
  29. mcli/ml/cli/main.py +1 -1
  30. mcli/ml/config/__init__.py +33 -0
  31. mcli/ml/configs/__init__.py +16 -0
  32. mcli/ml/dashboard/__init__.py +12 -0
  33. mcli/ml/dashboard/app_integrated.py +296 -30
  34. mcli/ml/dashboard/app_training.py +1 -1
  35. mcli/ml/dashboard/components/__init__.py +7 -0
  36. mcli/ml/dashboard/pages/__init__.py +6 -0
  37. mcli/ml/dashboard/pages/cicd.py +1 -1
  38. mcli/ml/dashboard/pages/debug_dependencies.py +364 -0
  39. mcli/ml/dashboard/pages/gravity_viz.py +565 -0
  40. mcli/ml/dashboard/pages/monte_carlo_predictions.py +555 -0
  41. mcli/ml/dashboard/pages/overview.py +378 -0
  42. mcli/ml/dashboard/pages/predictions_enhanced.py +20 -6
  43. mcli/ml/dashboard/pages/scrapers_and_logs.py +22 -6
  44. mcli/ml/dashboard/pages/test_portfolio.py +423 -0
  45. mcli/ml/dashboard/pages/trading.py +768 -0
  46. mcli/ml/dashboard/streamlit_extras_utils.py +297 -0
  47. mcli/ml/dashboard/utils.py +161 -0
  48. mcli/ml/dashboard/warning_suppression.py +34 -0
  49. mcli/ml/data_ingestion/__init__.py +39 -0
  50. mcli/ml/database/__init__.py +47 -0
  51. mcli/ml/database/session.py +169 -16
  52. mcli/ml/experimentation/__init__.py +29 -0
  53. mcli/ml/features/__init__.py +39 -0
  54. mcli/ml/mlops/__init__.py +33 -0
  55. mcli/ml/models/__init__.py +94 -0
  56. mcli/ml/monitoring/__init__.py +25 -0
  57. mcli/ml/optimization/__init__.py +27 -0
  58. mcli/ml/predictions/__init__.py +5 -0
  59. mcli/ml/predictions/monte_carlo.py +428 -0
  60. mcli/ml/preprocessing/__init__.py +28 -0
  61. mcli/ml/scripts/__init__.py +1 -0
  62. mcli/ml/trading/__init__.py +66 -0
  63. mcli/ml/trading/alpaca_client.py +417 -0
  64. mcli/ml/trading/migrations.py +164 -0
  65. mcli/ml/trading/models.py +418 -0
  66. mcli/ml/trading/paper_trading.py +326 -0
  67. mcli/ml/trading/risk_management.py +370 -0
  68. mcli/ml/trading/trading_service.py +480 -0
  69. mcli/ml/training/__init__.py +10 -0
  70. mcli/mygroup/__init__.py +3 -0
  71. mcli/public/__init__.py +1 -0
  72. mcli/public/commands/__init__.py +2 -0
  73. mcli/self/__init__.py +3 -0
  74. mcli/self/self_cmd.py +514 -15
  75. mcli/workflow/__init__.py +0 -0
  76. mcli/workflow/daemon/__init__.py +15 -0
  77. mcli/workflow/daemon/daemon.py +21 -3
  78. mcli/workflow/dashboard/__init__.py +5 -0
  79. mcli/workflow/docker/__init__.py +0 -0
  80. mcli/workflow/file/__init__.py +0 -0
  81. mcli/workflow/gcloud/__init__.py +1 -0
  82. mcli/workflow/git_commit/__init__.py +0 -0
  83. mcli/workflow/interview/__init__.py +0 -0
  84. mcli/workflow/politician_trading/__init__.py +4 -0
  85. mcli/workflow/registry/__init__.py +0 -0
  86. mcli/workflow/repo/__init__.py +0 -0
  87. mcli/workflow/scheduler/__init__.py +25 -0
  88. mcli/workflow/search/__init__.py +0 -0
  89. mcli/workflow/sync/__init__.py +5 -0
  90. mcli/workflow/videos/__init__.py +1 -0
  91. mcli/workflow/wakatime/__init__.py +80 -0
  92. {mcli_framework-7.2.0.dist-info → mcli_framework-7.4.0.dist-info}/METADATA +4 -1
  93. {mcli_framework-7.2.0.dist-info → mcli_framework-7.4.0.dist-info}/RECORD +97 -18
  94. {mcli_framework-7.2.0.dist-info → mcli_framework-7.4.0.dist-info}/WHEEL +0 -0
  95. {mcli_framework-7.2.0.dist-info → mcli_framework-7.4.0.dist-info}/entry_points.txt +0 -0
  96. {mcli_framework-7.2.0.dist-info → mcli_framework-7.4.0.dist-info}/licenses/LICENSE +0 -0
  97. {mcli_framework-7.2.0.dist-info → mcli_framework-7.4.0.dist-info}/top_level.txt +0 -0
mcli/ml/database/session.py
@@ -13,9 +13,121 @@ from mcli.ml.config import settings
 from .models import Base
 
 # Synchronous database setup
+# Prioritize DATABASE_URL environment variable over settings
+import os
+database_url = os.getenv("DATABASE_URL")
+
+# Check if DATABASE_URL has placeholder password
+if database_url and "your_password" in database_url:
+    database_url = None  # Treat placeholder as not set
+
+# If no DATABASE_URL or it's SQLite from settings, try explicit configuration
+if not database_url:
+    try:
+        # Check if settings has a non-SQLite configuration
+        settings_url = settings.database.url
+        if settings_url and "sqlite" not in settings_url:
+            database_url = settings_url
+    except (AttributeError, Exception):
+        pass  # Continue with database_url=None
+
+# If still no valid DATABASE_URL, try to use Supabase REST API via connection pooler
+if not database_url:
+    supabase_url = os.getenv("SUPABASE_URL", "")
+    supabase_service_key = os.getenv("SUPABASE_SERVICE_ROLE_KEY")
+
+    if supabase_url and supabase_service_key and "supabase.co" in supabase_url:
+        # Extract project reference from Supabase URL
+        # Format: https://PROJECT_REF.supabase.co
+        project_ref = supabase_url.replace("https://", "").replace("http://", "").split(".")[0]
+
+        # Use Supabase IPv4-only connection pooler
+        # This avoids IPv6 connectivity issues on Streamlit Cloud
+        # Try EU region poolers (which are verified to work for this project)
+        # Session mode (port 5432) for persistent connections
+        # Transaction mode (port 6543) for serverless/short-lived connections
+        pooler_urls = [
+            f"postgresql://postgres.{project_ref}:{supabase_service_key}@aws-1-eu-north-1.pooler.supabase.com:5432/postgres",
+            f"postgresql://postgres.{project_ref}:{supabase_service_key}@aws-1-eu-north-1.pooler.supabase.com:6543/postgres",
+        ]
+
+        # Try to connect to poolers
+        import logging
+        logger = logging.getLogger(__name__)
+
+        for pooler_url in pooler_urls:
+            try:
+                # Test connection
+                test_engine = create_engine(pooler_url, pool_pre_ping=True)
+                with test_engine.connect() as conn:
+                    from sqlalchemy import text
+                    conn.execute(text("SELECT 1"))
+                database_url = pooler_url
+                logger.info(f"Successfully connected via pooler: {pooler_url.split('@')[1].split(':')[0]}")
+                test_engine.dispose()
+                break
+            except Exception as e:
+                logger.warning(f"Failed to connect via {pooler_url.split('@')[1].split(':')[0]}: {e}")
+                continue
+
+        if not database_url:
+            # Fallback to first pooler URL if all fail (will be handled by pool_pre_ping later)
+            database_url = pooler_urls[0]
+
+        import warnings
+        warnings.warn(
+            "Using Supabase connection pooler with service role key. "
+            "For better performance, set DATABASE_URL with your actual database password. "
+            "Find it in Supabase Dashboard → Settings → Database → Connection String"
+        )
+    else:
+        # Default to SQLite for development/testing
+        database_url = "sqlite:///./ml_system.db"
+        import warnings
+        warnings.warn(
+            "No database credentials found. Using SQLite fallback. "
+            "Set SUPABASE_URL and SUPABASE_SERVICE_ROLE_KEY or DATABASE_URL in environment."
+        )
+
+# Debug: Log which database URL is being used
+import logging
+logger = logging.getLogger(__name__)
+
+if "pooler.supabase.com" in database_url:
+    logger.info(f"🔗 Using Supabase connection pooler")
+elif "sqlite" in database_url:
+    logger.warning("📁 Using SQLite fallback (database features limited)")
+else:
+    # Mask password in display
+    display_url = database_url
+    if "@" in display_url and ":" in display_url:
+        parts = display_url.split("@")
+        before_at = parts[0].split(":")
+        if len(before_at) >= 3:
+            before_at[2] = "***"
+            display_url = ":".join(before_at) + "@" + parts[1]
+    logger.info(f"🔗 Database URL: {display_url}")
+
+# Configure connection arguments based on database type
+if "sqlite" in database_url:
+    connect_args = {"check_same_thread": False}
+elif "postgresql" in database_url:
+    # Force IPv4 for PostgreSQL to avoid IPv6 connection issues
+    connect_args = {
+        "connect_timeout": 10,
+        "options": "-c statement_timeout=30000",  # 30 second query timeout
+    }
+else:
+    connect_args = {}
+
 engine = create_engine(
-    settings.database.url,
-    **settings.get_database_config(),
+    database_url,
+    connect_args=connect_args,
+    pool_pre_ping=True,  # Verify connections before using them
+    pool_recycle=3600,  # Recycle connections after 1 hour
+    pool_size=5,  # Smaller pool for Streamlit Cloud
+    max_overflow=10,
+    pool_timeout=30,
 )
 
 SessionLocal = sessionmaker(
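Reviewer note: with this hunk the synchronous engine's URL is resolved from the environment first, then from settings, then from Supabase credentials, and finally falls back to SQLite. A minimal configuration sketch using the variable names read above (the connection string and key values are placeholders, not real credentials):

import os

# Option 1: full connection string, checked first by the new session.py
os.environ["DATABASE_URL"] = "postgresql://postgres:secret@db.example.com:5432/postgres"  # placeholder

# Option 2: Supabase credentials, from which the pooler URL is derived
os.environ["SUPABASE_URL"] = "https://PROJECT_REF.supabase.co"        # placeholder project ref
os.environ["SUPABASE_SERVICE_ROLE_KEY"] = "service-role-key"          # placeholder key

# With neither set, the module falls back to sqlite:///./ml_system.db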
@@ -27,14 +139,30 @@ SessionLocal = sessionmaker(
 
 
 # Asynchronous database setup
-async_engine = create_async_engine(
-    settings.database.async_url,
-    pool_size=settings.database.pool_size,
-    max_overflow=settings.database.max_overflow,
-    pool_timeout=settings.database.pool_timeout,
-    pool_pre_ping=True,
-    echo=settings.debug,
-)
+try:
+    async_engine = create_async_engine(
+        settings.database.async_url,
+        pool_size=settings.database.pool_size,
+        max_overflow=settings.database.max_overflow,
+        pool_timeout=settings.database.pool_timeout,
+        pool_pre_ping=True,
+        echo=settings.debug,
+    )
+except (AttributeError, Exception):
+    # Fallback for async engine
+    import os
+    async_database_url = os.getenv("ASYNC_DATABASE_URL")
+    if not async_database_url:
+        # Convert sync URL to async if possible
+        if "sqlite" in database_url:
+            async_database_url = database_url.replace("sqlite:///", "sqlite+aiosqlite:///")
+        else:
+            async_database_url = database_url.replace("postgresql://", "postgresql+asyncpg://")
+
+    async_engine = create_async_engine(
+        async_database_url,
+        pool_pre_ping=True,
+    )
 
 AsyncSessionLocal = async_sessionmaker(
     async_engine,
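The async fallback derives its URL from the synchronous one by swapping in an async driver. A standalone sketch of that mapping, mirroring the two replace() calls above (not part of the package):

def to_async_url(sync_url: str) -> str:
    # sqlite:///./ml_system.db -> sqlite+aiosqlite:///./ml_system.db
    if "sqlite" in sync_url:
        return sync_url.replace("sqlite:///", "sqlite+aiosqlite:///")
    # postgresql://host/db -> postgresql+asyncpg://host/db
    return sync_url.replace("postgresql://", "postgresql+asyncpg://")


assert to_async_url("sqlite:///./ml_system.db") == "sqlite+aiosqlite:///./ml_system.db"
assert to_async_url("postgresql://u:p@host:5432/db") == "postgresql+asyncpg://u:p@host:5432/db"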
@@ -81,21 +209,46 @@ async def get_async_db() -> AsyncGenerator[AsyncSession, None]:
 @contextmanager
 def get_session() -> Generator[Session, None, None]:
     """
-    Context manager for database session.
+    Context manager for database session with improved error handling.
 
     Usage:
         with get_session() as session:
            user = session.query(User).first()
+
+    Raises:
+        ConnectionError: If database connection cannot be established
+        Exception: For other database errors
     """
-    session = SessionLocal()
+    session = None
    try:
+        session = SessionLocal()
+        # Test the connection
+        from sqlalchemy import text
+        session.execute(text("SELECT 1"))
        yield session
        session.commit()
-    except Exception:
-        session.rollback()
-        raise
+    except Exception as e:
+        if session:
+            session.rollback()
+        # Provide more helpful error messages
+        error_msg = str(e).lower()
+        if "cannot assign requested address" in error_msg or "ipv6" in error_msg:
+            raise ConnectionError(
+                "Database connection failed due to network issues. "
+                "This may be an IPv6 connectivity problem. "
+                "Please ensure DATABASE_URL uses connection pooler (pooler.supabase.com) instead of direct connection (db.supabase.co). "
+                f"Original error: {e}"
+            )
+        elif "authentication failed" in error_msg or "password" in error_msg:
+            raise ConnectionError(
+                "Database authentication failed. Please check your database credentials. "
+                f"Original error: {e}"
+            )
+        else:
+            raise
    finally:
-        session.close()
+        if session:
+            session.close()
 
 
 @asynccontextmanager
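From the caller's side, the reworked context manager now probes the connection with SELECT 1 on entry, so connectivity and credential problems surface as ConnectionError at the with statement rather than on the first query. A usage sketch (the User import path is assumed, not taken from this diff):

from mcli.ml.auth.models import User  # import path assumed
from mcli.ml.database.session import get_session

try:
    with get_session() as session:
        user = session.query(User).first()
except ConnectionError as exc:
    # Raised for pooler/IPv6 or authentication failures, per the handler above
    print(f"Database unavailable: {exc}")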
mcli/ml/experimentation/__init__.py (new file)
@@ -0,0 +1,29 @@
+"""ML Experimentation and A/B Testing Framework"""
+
+from .ab_testing import (
+    ABTestingFramework,
+    ExperimentConfig,
+    ExperimentResult,
+    ExperimentStatus,
+    Metric,
+    MetricsCollector,
+    StatisticalAnalyzer,
+    TrafficSplitter,
+    UserAssignment,
+    Variant,
+    VariantType,
+)
+
+__all__ = [
+    "ABTestingFramework",
+    "ExperimentConfig",
+    "Variant",
+    "VariantType",
+    "Metric",
+    "ExperimentStatus",
+    "ExperimentResult",
+    "UserAssignment",
+    "TrafficSplitter",
+    "MetricsCollector",
+    "StatisticalAnalyzer",
+]
mcli/ml/features/__init__.py (new file)
@@ -0,0 +1,39 @@
+"""Feature Engineering Module for Stock Recommendation Models"""
+
+from .ensemble_features import (
+    DynamicFeatureSelector,
+    EnsembleFeatureBuilder,
+    FeatureInteractionEngine,
+)
+from .political_features import (
+    CongressionalTrackingFeatures,
+    PolicyImpactFeatures,
+    PoliticalInfluenceFeatures,
+)
+from .recommendation_engine import (
+    RecommendationConfig,
+    RecommendationResult,
+    StockRecommendationEngine,
+)
+from .stock_features import (
+    CrossAssetFeatures,
+    MarketRegimeFeatures,
+    StockRecommendationFeatures,
+    TechnicalIndicatorFeatures,
+)
+
+__all__ = [
+    "StockRecommendationFeatures",
+    "TechnicalIndicatorFeatures",
+    "MarketRegimeFeatures",
+    "CrossAssetFeatures",
+    "PoliticalInfluenceFeatures",
+    "CongressionalTrackingFeatures",
+    "PolicyImpactFeatures",
+    "EnsembleFeatureBuilder",
+    "FeatureInteractionEngine",
+    "DynamicFeatureSelector",
+    "StockRecommendationEngine",
+    "RecommendationConfig",
+    "RecommendationResult",
+]
mcli/ml/mlops/__init__.py (new file)
@@ -0,0 +1,33 @@
+"""MLOps components for ML pipeline management"""
+
+from .experiment_tracker import (
+    ExperimentRun,
+    ExperimentTracker,
+    MLflowConfig,
+    ModelRegistry,
+)
+from .model_serving import (
+    ModelEndpoint,
+    ModelServer,
+    PredictionService,
+)
+from .pipeline_orchestrator import (
+    MLPipeline,
+    PipelineConfig,
+    PipelineExecutor,
+    PipelineStep,
+)
+
+__all__ = [
+    "ExperimentTracker",
+    "ModelRegistry",
+    "MLflowConfig",
+    "ExperimentRun",
+    "ModelServer",
+    "PredictionService",
+    "ModelEndpoint",
+    "MLPipeline",
+    "PipelineStep",
+    "PipelineConfig",
+    "PipelineExecutor",
+]
mcli/ml/models/__init__.py (new file)
@@ -0,0 +1,94 @@
+"""ML Models for Stock Recommendation System"""
+
+from pathlib import Path
+from typing import Any, Dict, Optional
+
+import torch
+
+from .base_models import (
+    BaseStockModel,
+    ModelMetrics,
+    ValidationResult,
+)
+from .ensemble_models import (
+    AttentionStockPredictor,
+    CNNFeatureExtractor,
+    DeepEnsembleModel,
+    EnsembleConfig,
+    EnsembleTrainer,
+    LSTMStockPredictor,
+    ModelConfig,
+    TransformerStockModel,
+)
+from .recommendation_models import (
+    RecommendationConfig,
+    RecommendationTrainer,
+    StockRecommendationModel,
+)
+
+# Model registry
+_loaded_models: Dict[str, Any] = {}
+
+
+async def load_production_models():
+    """Load production models into memory"""
+    from mcli.ml.config import settings
+    from mcli.ml.logging import get_logger
+
+    logger = get_logger(__name__)
+    model_dir = settings.model.model_dir
+
+    if not model_dir.exists():
+        model_dir.mkdir(parents=True, exist_ok=True)
+        return
+
+    for model_path in model_dir.glob("*.pt"):
+        try:
+            model_id = model_path.stem
+            model = torch.load(model_path, map_location=settings.model.device)
+            _loaded_models[model_id] = model
+            logger.info(f"Loaded model: {model_id}")
+        except Exception as e:
+            logger.error(f"Failed to load model {model_path}: {e}")
+
+
+async def get_model_by_id(model_id: str):
+    """Get loaded model by ID"""
+    from mcli.ml.config import settings
+
+    if model_id not in _loaded_models:
+        # Try to load from disk
+        model_path = settings.model.model_dir / f"{model_id}.pt"
+        if model_path.exists():
+            _loaded_models[model_id] = torch.load(model_path, map_location=settings.model.device)
+
+    return _loaded_models.get(model_id)
+
+
+def initialize_models():
+    """Initialize models on startup"""
+    from mcli.ml.logging import get_logger
+
+    logger = get_logger(__name__)
+    logger.info("Initializing ML models...")
+
+
+__all__ = [
+    "DeepEnsembleModel",
+    "AttentionStockPredictor",
+    "TransformerStockModel",
+    "LSTMStockPredictor",
+    "CNNFeatureExtractor",
+    "EnsembleTrainer",
+    "ModelConfig",
+    "EnsembleConfig",
+    "BaseStockModel",
+    "ModelMetrics",
+    "ValidationResult",
+    "StockRecommendationModel",
+    "RecommendationTrainer",
+    "RecommendationConfig",
+    "load_production_models",
+    "get_model_by_id",
+    "initialize_models",
+]
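Both loaders above are coroutines, so callers are expected to await them; a minimal usage sketch under that assumption (the model id is hypothetical):

import asyncio

from mcli.ml.models import get_model_by_id, load_production_models


async def main():
    # Populate the in-memory registry from settings.model.model_dir (*.pt files)
    await load_production_models()
    # Look up one model by file stem; returns None if it is neither cached nor on disk
    model = await get_model_by_id("ensemble_v1")  # hypothetical model id
    print("loaded" if model is not None else "not found")


asyncio.run(main())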
mcli/ml/monitoring/__init__.py (new file)
@@ -0,0 +1,25 @@
+"""ML Model Monitoring and Drift Detection"""
+
+from .drift_detection import (
+    AlertSeverity,
+    ConceptDriftDetector,
+    DataProfile,
+    DriftAlert,
+    DriftType,
+    ModelMetrics,
+    ModelMonitor,
+    OutlierDetector,
+    StatisticalDriftDetector,
+)
+
+__all__ = [
+    "ModelMonitor",
+    "StatisticalDriftDetector",
+    "ConceptDriftDetector",
+    "OutlierDetector",
+    "DriftAlert",
+    "DriftType",
+    "AlertSeverity",
+    "ModelMetrics",
+    "DataProfile",
+]
mcli/ml/optimization/__init__.py (new file)
@@ -0,0 +1,27 @@
+"""Advanced Portfolio Optimization"""
+
+from .portfolio_optimizer import (
+    AdvancedPortfolioOptimizer,
+    BaseOptimizer,
+    BlackLittermanOptimizer,
+    CVaROptimizer,
+    KellyCriterionOptimizer,
+    MeanVarianceOptimizer,
+    OptimizationConstraints,
+    OptimizationObjective,
+    PortfolioAllocation,
+    RiskParityOptimizer,
+)
+
+__all__ = [
+    "AdvancedPortfolioOptimizer",
+    "OptimizationObjective",
+    "OptimizationConstraints",
+    "PortfolioAllocation",
+    "MeanVarianceOptimizer",
+    "RiskParityOptimizer",
+    "BlackLittermanOptimizer",
+    "CVaROptimizer",
+    "KellyCriterionOptimizer",
+    "BaseOptimizer",
+]
mcli/ml/predictions/__init__.py (new file)
@@ -0,0 +1,5 @@
+"""ML Predictions Module"""
+
+from .prediction_engine import PoliticianTradingPredictor
+
+__all__ = ["PoliticianTradingPredictor"]