mcli-framework 7.12.1 → 7.12.3 (py3-none-any.whl)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: the registry has flagged this version of mcli-framework; details are available on its registry page.
- mcli/app/__init__.py +0 -2
- mcli/app/commands_cmd.py +19 -23
- mcli/app/completion_helpers.py +5 -5
- mcli/app/init_cmd.py +10 -10
- mcli/app/lock_cmd.py +82 -27
- mcli/app/main.py +2 -8
- mcli/app/model/model.py +5 -10
- mcli/app/store_cmd.py +8 -8
- mcli/app/video/__init__.py +0 -2
- mcli/app/video/video.py +1 -14
- mcli/chat/chat.py +90 -108
- mcli/chat/command_rag.py +0 -4
- mcli/chat/enhanced_chat.py +32 -41
- mcli/chat/system_controller.py +37 -37
- mcli/chat/system_integration.py +4 -5
- mcli/cli.py +2 -3
- mcli/lib/api/api.py +4 -9
- mcli/lib/api/daemon_client.py +19 -20
- mcli/lib/api/daemon_client_local.py +1 -3
- mcli/lib/api/daemon_decorator.py +6 -6
- mcli/lib/api/mcli_decorators.py +4 -8
- mcli/lib/auth/__init__.py +0 -1
- mcli/lib/auth/auth.py +4 -5
- mcli/lib/auth/mcli_manager.py +7 -12
- mcli/lib/auth/token_util.py +5 -5
- mcli/lib/config/__init__.py +29 -1
- mcli/lib/config/config.py +0 -1
- mcli/lib/custom_commands.py +1 -1
- mcli/lib/discovery/command_discovery.py +15 -15
- mcli/lib/erd/erd.py +7 -7
- mcli/lib/files/files.py +1 -1
- mcli/lib/fs/__init__.py +31 -1
- mcli/lib/fs/fs.py +12 -13
- mcli/lib/lib.py +0 -1
- mcli/lib/logger/logger.py +7 -10
- mcli/lib/performance/optimizer.py +25 -27
- mcli/lib/performance/rust_bridge.py +22 -27
- mcli/lib/performance/uvloop_config.py +0 -1
- mcli/lib/pickles/__init__.py +0 -1
- mcli/lib/pickles/pickles.py +0 -2
- mcli/lib/secrets/commands.py +0 -2
- mcli/lib/secrets/manager.py +0 -1
- mcli/lib/secrets/repl.py +2 -3
- mcli/lib/secrets/store.py +1 -2
- mcli/lib/services/data_pipeline.py +34 -34
- mcli/lib/services/lsh_client.py +38 -40
- mcli/lib/shell/shell.py +2 -2
- mcli/lib/toml/__init__.py +0 -1
- mcli/lib/ui/styling.py +0 -1
- mcli/lib/ui/visual_effects.py +33 -41
- mcli/lib/watcher/watcher.py +0 -1
- mcli/ml/__init__.py +1 -1
- mcli/ml/api/__init__.py +1 -1
- mcli/ml/api/app.py +8 -9
- mcli/ml/api/middleware.py +10 -10
- mcli/ml/api/routers/__init__.py +1 -1
- mcli/ml/api/routers/admin_router.py +3 -3
- mcli/ml/api/routers/auth_router.py +17 -18
- mcli/ml/api/routers/backtest_router.py +2 -2
- mcli/ml/api/routers/data_router.py +2 -2
- mcli/ml/api/routers/model_router.py +14 -15
- mcli/ml/api/routers/monitoring_router.py +2 -2
- mcli/ml/api/routers/portfolio_router.py +2 -2
- mcli/ml/api/routers/prediction_router.py +10 -9
- mcli/ml/api/routers/trade_router.py +2 -2
- mcli/ml/api/routers/websocket_router.py +6 -7
- mcli/ml/api/schemas.py +2 -2
- mcli/ml/auth/__init__.py +1 -1
- mcli/ml/auth/auth_manager.py +22 -23
- mcli/ml/auth/models.py +17 -17
- mcli/ml/auth/permissions.py +17 -17
- mcli/ml/backtesting/__init__.py +1 -1
- mcli/ml/backtesting/backtest_engine.py +31 -35
- mcli/ml/backtesting/performance_metrics.py +12 -14
- mcli/ml/backtesting/run.py +1 -2
- mcli/ml/cache.py +35 -36
- mcli/ml/cli/__init__.py +1 -1
- mcli/ml/cli/main.py +21 -24
- mcli/ml/config/__init__.py +1 -1
- mcli/ml/config/settings.py +28 -29
- mcli/ml/configs/__init__.py +1 -1
- mcli/ml/configs/dvc_config.py +14 -15
- mcli/ml/configs/mlflow_config.py +12 -13
- mcli/ml/configs/mlops_manager.py +19 -21
- mcli/ml/dashboard/__init__.py +4 -4
- mcli/ml/dashboard/app.py +20 -30
- mcli/ml/dashboard/app_supabase.py +16 -19
- mcli/ml/dashboard/app_training.py +11 -14
- mcli/ml/dashboard/cli.py +2 -2
- mcli/ml/dashboard/common.py +2 -3
- mcli/ml/dashboard/components/__init__.py +1 -1
- mcli/ml/dashboard/components/charts.py +13 -11
- mcli/ml/dashboard/components/metrics.py +7 -7
- mcli/ml/dashboard/components/tables.py +12 -9
- mcli/ml/dashboard/overview.py +2 -2
- mcli/ml/dashboard/pages/__init__.py +1 -1
- mcli/ml/dashboard/pages/cicd.py +15 -18
- mcli/ml/dashboard/pages/debug_dependencies.py +7 -7
- mcli/ml/dashboard/pages/monte_carlo_predictions.py +11 -18
- mcli/ml/dashboard/pages/predictions_enhanced.py +24 -32
- mcli/ml/dashboard/pages/scrapers_and_logs.py +22 -24
- mcli/ml/dashboard/pages/test_portfolio.py +3 -6
- mcli/ml/dashboard/pages/trading.py +16 -18
- mcli/ml/dashboard/pages/workflows.py +20 -30
- mcli/ml/dashboard/utils.py +9 -9
- mcli/ml/dashboard/warning_suppression.py +3 -3
- mcli/ml/data_ingestion/__init__.py +1 -1
- mcli/ml/data_ingestion/api_connectors.py +41 -46
- mcli/ml/data_ingestion/data_pipeline.py +36 -46
- mcli/ml/data_ingestion/stream_processor.py +43 -46
- mcli/ml/database/__init__.py +1 -1
- mcli/ml/database/migrations/env.py +2 -2
- mcli/ml/database/models.py +22 -24
- mcli/ml/database/session.py +14 -14
- mcli/ml/experimentation/__init__.py +1 -1
- mcli/ml/experimentation/ab_testing.py +45 -46
- mcli/ml/features/__init__.py +1 -1
- mcli/ml/features/ensemble_features.py +22 -27
- mcli/ml/features/recommendation_engine.py +30 -30
- mcli/ml/features/stock_features.py +29 -32
- mcli/ml/features/test_feature_engineering.py +10 -11
- mcli/ml/logging.py +4 -4
- mcli/ml/mlops/__init__.py +1 -1
- mcli/ml/mlops/data_versioning.py +29 -30
- mcli/ml/mlops/experiment_tracker.py +24 -24
- mcli/ml/mlops/model_serving.py +31 -34
- mcli/ml/mlops/pipeline_orchestrator.py +27 -35
- mcli/ml/models/__init__.py +5 -6
- mcli/ml/models/base_models.py +23 -23
- mcli/ml/models/ensemble_models.py +31 -31
- mcli/ml/models/recommendation_models.py +18 -19
- mcli/ml/models/test_models.py +14 -16
- mcli/ml/monitoring/__init__.py +1 -1
- mcli/ml/monitoring/drift_detection.py +32 -36
- mcli/ml/monitoring/metrics.py +2 -2
- mcli/ml/optimization/__init__.py +1 -1
- mcli/ml/optimization/optimize.py +1 -2
- mcli/ml/optimization/portfolio_optimizer.py +30 -32
- mcli/ml/predictions/__init__.py +1 -1
- mcli/ml/preprocessing/__init__.py +1 -1
- mcli/ml/preprocessing/data_cleaners.py +22 -23
- mcli/ml/preprocessing/feature_extractors.py +23 -26
- mcli/ml/preprocessing/ml_pipeline.py +23 -23
- mcli/ml/preprocessing/test_preprocessing.py +7 -8
- mcli/ml/scripts/populate_sample_data.py +0 -4
- mcli/ml/serving/serve.py +1 -2
- mcli/ml/tasks.py +17 -17
- mcli/ml/tests/test_integration.py +29 -30
- mcli/ml/tests/test_training_dashboard.py +21 -21
- mcli/ml/trading/__init__.py +1 -1
- mcli/ml/trading/migrations.py +5 -5
- mcli/ml/trading/models.py +21 -23
- mcli/ml/trading/paper_trading.py +16 -13
- mcli/ml/trading/risk_management.py +17 -18
- mcli/ml/trading/trading_service.py +25 -28
- mcli/ml/training/__init__.py +1 -1
- mcli/ml/training/train.py +0 -1
- mcli/public/oi/oi.py +1 -2
- mcli/self/completion_cmd.py +6 -10
- mcli/self/logs_cmd.py +19 -24
- mcli/self/migrate_cmd.py +22 -20
- mcli/self/redis_cmd.py +10 -11
- mcli/self/self_cmd.py +10 -18
- mcli/self/store_cmd.py +10 -12
- mcli/self/visual_cmd.py +9 -14
- mcli/self/zsh_cmd.py +2 -4
- mcli/workflow/daemon/async_command_database.py +23 -24
- mcli/workflow/daemon/async_process_manager.py +27 -29
- mcli/workflow/daemon/client.py +27 -33
- mcli/workflow/daemon/daemon.py +32 -36
- mcli/workflow/daemon/enhanced_daemon.py +24 -33
- mcli/workflow/daemon/process_cli.py +11 -12
- mcli/workflow/daemon/process_manager.py +23 -26
- mcli/workflow/daemon/test_daemon.py +4 -5
- mcli/workflow/dashboard/dashboard_cmd.py +0 -1
- mcli/workflow/doc_convert.py +15 -17
- mcli/workflow/gcloud/__init__.py +0 -1
- mcli/workflow/gcloud/gcloud.py +11 -8
- mcli/workflow/git_commit/ai_service.py +14 -15
- mcli/workflow/lsh_integration.py +9 -11
- mcli/workflow/model_service/client.py +26 -31
- mcli/workflow/model_service/download_and_run_efficient_models.py +10 -14
- mcli/workflow/model_service/lightweight_embedder.py +25 -35
- mcli/workflow/model_service/lightweight_model_server.py +26 -32
- mcli/workflow/model_service/lightweight_test.py +7 -10
- mcli/workflow/model_service/model_service.py +80 -91
- mcli/workflow/model_service/ollama_efficient_runner.py +14 -18
- mcli/workflow/model_service/openai_adapter.py +23 -23
- mcli/workflow/model_service/pdf_processor.py +21 -26
- mcli/workflow/model_service/test_efficient_runner.py +12 -16
- mcli/workflow/model_service/test_example.py +11 -13
- mcli/workflow/model_service/test_integration.py +3 -5
- mcli/workflow/model_service/test_new_features.py +7 -8
- mcli/workflow/notebook/converter.py +1 -1
- mcli/workflow/notebook/notebook_cmd.py +5 -6
- mcli/workflow/notebook/schema.py +0 -1
- mcli/workflow/notebook/validator.py +7 -3
- mcli/workflow/openai/openai.py +1 -2
- mcli/workflow/registry/registry.py +4 -1
- mcli/workflow/repo/repo.py +6 -7
- mcli/workflow/scheduler/cron_parser.py +16 -19
- mcli/workflow/scheduler/job.py +10 -10
- mcli/workflow/scheduler/monitor.py +15 -15
- mcli/workflow/scheduler/persistence.py +17 -18
- mcli/workflow/scheduler/scheduler.py +37 -38
- mcli/workflow/secrets/__init__.py +1 -1
- mcli/workflow/sync/test_cmd.py +0 -1
- mcli/workflow/wakatime/__init__.py +5 -9
- mcli/workflow/wakatime/wakatime.py +1 -2
- {mcli_framework-7.12.1.dist-info → mcli_framework-7.12.3.dist-info}/METADATA +1 -1
- mcli_framework-7.12.3.dist-info/RECORD +279 -0
- mcli_framework-7.12.1.dist-info/RECORD +0 -279
- {mcli_framework-7.12.1.dist-info → mcli_framework-7.12.3.dist-info}/WHEEL +0 -0
- {mcli_framework-7.12.1.dist-info → mcli_framework-7.12.3.dist-info}/entry_points.txt +0 -0
- {mcli_framework-7.12.1.dist-info → mcli_framework-7.12.3.dist-info}/licenses/LICENSE +0 -0
- {mcli_framework-7.12.1.dist-info → mcli_framework-7.12.3.dist-info}/top_level.txt +0 -0
mcli/lib/pickles/__init__.py
CHANGED
@@ -1 +0,0 @@
-from .pickles import ObjectCache
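With this change `ObjectCache` is no longer re-exported from the package `__init__`, so downstream imports of the name must target the defining module; the same de-export pattern is applied to `mcli/lib/toml/__init__.py` at the end of this diff. A sketch of the adjustment a caller would make (assuming `ObjectCache` itself is unchanged in `pickles.py`):

    # 7.12.1 - worked via the package-level re-export:
    # from mcli.lib.pickles import ObjectCache

    # 7.12.3 - import from the defining module instead:
    from mcli.lib.pickles.pickles import ObjectCache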
mcli/lib/pickles/pickles.py
CHANGED
mcli/lib/secrets/commands.py
CHANGED
@@ -17,7 +17,6 @@ from .store import SecretsStore
 @click.group(name="secrets", help="Secure secrets management with encryption and git sync")
 def secrets_group():
     """Secrets management commands."""
-    pass


 @secrets_group.command(name="repl", help="Launch interactive secrets shell")
@@ -127,7 +126,6 @@ def secrets_import(env_file: str, namespace: str):
 @secrets_group.group(name="store", help="Git-based secrets synchronization")
 def store_group():
     """Store management commands."""
-    pass


 @store_group.command(name="init", help="Initialize secrets store")
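Both hunks make the same cleanup: when a function body consists only of a docstring, the trailing `pass` is redundant, because the docstring expression is itself a complete body. A minimal illustration (decorator taken from the hunk above):

    import click

    @click.group(name="secrets", help="Secure secrets management with encryption and git sync")
    def secrets_group():
        """Secrets management commands."""
        # No `pass` needed: the docstring alone is a valid function body.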
mcli/lib/secrets/manager.py
CHANGED
mcli/lib/secrets/repl.py
CHANGED
@@ -2,7 +2,6 @@
 REPL (Read-Eval-Print Loop) for LSH secrets management.
 """

-import os
 from pathlib import Path
 from typing import List

@@ -13,13 +12,13 @@ from prompt_toolkit.completion import WordCompleter
 from prompt_toolkit.history import FileHistory

 from mcli.lib.logger.logger import get_logger
-
-logger = get_logger(__name__)
 from mcli.lib.ui.styling import console, error, info, success, warning

 from .manager import SecretsManager
 from .store import SecretsStore

+logger = get_logger(__name__)
+

 class SecretsREPL:
     """Interactive REPL for secrets management."""
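Besides dropping the unused `import os`, this hunk moves the module-level `logger = get_logger(__name__)` assignment below the import block. Interleaving assignments with imports trips import-ordering linters (ruff's E402, "module level import not at top of file", is the likely motivation, though that is an assumption); the resulting layout is the conventional one:

    # Imports first, then module-level state.
    from mcli.lib.logger.logger import get_logger
    from mcli.lib.ui.styling import console, error, info, success, warning

    from .manager import SecretsManager
    from .store import SecretsStore

    logger = get_logger(__name__)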
mcli/lib/secrets/store.py
CHANGED
@@ -7,13 +7,12 @@ import shutil
 from pathlib import Path
 from typing import Any, Dict, Optional

-import click
 from git import GitCommandError, Repo

 from mcli.lib.logger.logger import get_logger
+from mcli.lib.ui.styling import error, info, success, warning

 logger = get_logger(__name__)
-from mcli.lib.ui.styling import error, info, success, warning


 class SecretsStore:

mcli/lib/services/data_pipeline.py
CHANGED
@@ -8,7 +8,7 @@ import json
 import time
 from datetime import datetime, timezone
 from pathlib import Path
-from typing import Any,
+from typing import Any, Dict, List, Optional

 from mcli.lib.logger.logger import get_logger

@@ -18,7 +18,7 @@ logger = get_logger(__name__)


 class DataPipelineConfig:
-    """Configuration for data pipeline"""
+    """Configuration for data pipeline."""

     def __init__(self):
         self.batch_size = 100
@@ -31,13 +31,13 @@ class DataPipelineConfig:


 class DataValidator:
-    """Validates incoming data"""
+    """Validates incoming data."""

     def __init__(self):
         self.logger = get_logger(f"{__name__}.validator")

     async def validate_trading_record(self, record: Dict[str, Any]) -> bool:
-        """Validate politician trading record"""
+        """Validate politician trading record."""
         required_fields = [
             "politician_name",
             "transaction_date",
@@ -69,7 +69,7 @@ class DataValidator:
         return True

     async def validate_supabase_record(self, table: str, record: Dict[str, Any]) -> bool:
-        """Validate Supabase record based on table schema"""
+        """Validate Supabase record based on table schema."""
         if not record:
             return False

@@ -82,13 +82,13 @@ class DataValidator:


 class DataEnricher:
-    """Enriches data with additional information"""
+    """Enriches data with additional information."""

     def __init__(self):
         self.logger = get_logger(f"{__name__}.enricher")

     async def enrich_trading_record(self, record: Dict[str, Any]) -> Dict[str, Any]:
-        """Enrich trading record with additional data"""
+        """Enrich trading record with additional data."""
         enriched = record.copy()

         # Add processing timestamp
@@ -115,7 +115,7 @@ class DataEnricher:
         return enriched

     def _categorize_amount(self, amount: float) -> str:
-        """Categorize transaction amount"""
+        """Categorize transaction amount."""
         if amount < 1000:
             return "micro"
         elif amount < 15000:
@@ -128,7 +128,7 @@ class DataEnricher:
         return "mega"

     def _bucket_amount(self, amount: float) -> str:
-        """Bucket amounts for analysis"""
+        """Bucket amounts for analysis."""
         if amount < 1000:
             return "0-1K"
         elif amount < 10000:
@@ -145,7 +145,7 @@ class DataEnricher:
         return "1M+"

     async def _get_politician_metadata(self, politician_name: str) -> Dict[str, Any]:
-        """Get politician metadata (placeholder for external API)"""
+        """Get politician metadata (placeholder for external API)."""
         # This would typically call an external API
         return {
             "enriched_at": datetime.now(timezone.utc).isoformat(),
@@ -154,7 +154,7 @@ class DataEnricher:
         }

     async def _get_market_context(self, asset_name: str, transaction_date: str) -> Dict[str, Any]:
-        """Get market context for the transaction (placeholder)"""
+        """Get market context for the transaction (placeholder)."""
         # This would typically call financial APIs
         return {
             "enriched_at": datetime.now(timezone.utc).isoformat(),
@@ -164,7 +164,7 @@ class DataEnricher:


 class DataProcessor:
-    """Main data processing engine"""
+    """Main data processing engine."""

     def __init__(self, config: DataPipelineConfig):
         self.config = config
@@ -179,13 +179,13 @@ class DataProcessor:
         self.config.output_dir.mkdir(parents=True, exist_ok=True)

     async def process_trading_data(self, records: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
-        """Process politician trading data"""
+        """Process politician trading data."""
         processed_records = []

         for record in records:
             try:
                 # Validate
-                if self.config.enable_validation:
+                if self.config.enable_validation:  # noqa: SIM102
                     if not await self.validator.validate_trading_record(record):
                         self.logger.warning(
                             f"Validation failed for record: {record.get('id', 'unknown')}"
@@ -214,10 +214,10 @@ class DataProcessor:
     async def process_supabase_sync(
         self, table: str, operation: str, data: Dict[str, Any]
     ) -> Dict[str, Any]:
-        """Process Supabase sync data"""
+        """Process Supabase sync data."""
         try:
             # Validate
-            if self.config.enable_validation:
+            if self.config.enable_validation:  # noqa: SIM102
                 if not await self.validator.validate_supabase_record(table, data):
                     self.logger.warning(f"Validation failed for {table} record")
                     return {}
@@ -239,7 +239,7 @@ class DataProcessor:
     async def _transform_supabase_data(
         self, table: str, operation: str, data: Dict[str, Any]
     ) -> Dict[str, Any]:
-        """Transform Supabase data based on table schema"""
+        """Transform Supabase data based on table schema."""
         transformed = data.copy()

         # Apply table-specific transformations
@@ -251,7 +251,7 @@ class DataProcessor:
         return transformed

     async def _transform_politician_table(self, data: Dict[str, Any]) -> Dict[str, Any]:
-        """Transform politician table data"""
+        """Transform politician table data."""
         # Normalize names
         if "name" in data:
             data["name_normalized"] = data["name"].title()
@@ -263,14 +263,14 @@ class DataProcessor:
         return data

     async def _transform_trading_table(self, data: Dict[str, Any]) -> Dict[str, Any]:
-        """Transform trading table data"""
+        """Transform trading table data."""
         # Normalize asset names
         if "asset_name" in data:
             data["asset_name_normalized"] = data["asset_name"].upper()

         # Convert amounts to float
         if "amount" in data and isinstance(data["amount"], str):
-            try:
+            try:  # noqa: SIM105
                 data["amount_float"] = float(data["amount"])
             except ValueError:
                 pass
@@ -278,7 +278,7 @@ class DataProcessor:
         return data

     async def add_to_batch(self, record: Dict[str, Any]):
-        """Add record to batch for processing"""
+        """Add record to batch for processing."""
         async with self._processing_lock:
             self.batch_buffer.append(record)

@@ -293,7 +293,7 @@ class DataProcessor:
             await self._process_batch()

     async def _process_batch(self):
-        """Process accumulated batch"""
+        """Process accumulated batch."""
         if not self.batch_buffer:
             return

@@ -319,7 +319,7 @@ class DataProcessor:
             self.batch_buffer.extend(batch)

     async def _save_batch(self, batch: List[Dict[str, Any]]):
-        """Save processed batch to file"""
+        """Save processed batch to file."""
         if not batch:
             return

@@ -338,18 +338,18 @@ class DataProcessor:
             self.logger.error(f"Failed to save batch: {e}")

     async def _emit_batch_completed(self, batch: List[Dict[str, Any]]):
-        """Emit batch completion event"""
+        """Emit batch completion event."""
         self.logger.info(f"Batch processing completed: {len(batch)} records")

     async def flush_batch(self):
-        """Force process current batch"""
+        """Force process current batch."""
         async with self._processing_lock:
             if self.batch_buffer:
                 await self._process_batch()


 class LSHDataPipeline:
-    """Main integration service for LSH-mcli data pipeline"""
+    """Main integration service for LSH-mcli data pipeline."""

     def __init__(self, lsh_client: LSHClient, config: Optional[DataPipelineConfig] = None):
         self.lsh_client = lsh_client
@@ -363,13 +363,13 @@ class LSHDataPipeline:
         self._setup_pipeline_handlers()

     def _setup_pipeline_handlers(self):
-        """Setup event handlers for pipeline processing"""
+        """Setup event handlers for pipeline processing."""
         self.lsh_client.on("lsh.job.completed", self._handle_job_completed)
         self.lsh_client.on("lsh.supabase.sync", self._handle_supabase_sync)
         self.lsh_client.on("trading.data.processed", self._handle_trading_data)

     async def _handle_job_completed(self, event_data: Dict[str, Any]):
-        """Handle LSH job completion"""
+        """Handle LSH job completion."""
         job_name = event_data.get("job_name", "")
         job_id = event_data.get("job_id", "")

@@ -382,7 +382,7 @@ class LSHDataPipeline:
         await self._process_job_output(job_id, stdout)

     async def _handle_supabase_sync(self, event_data: Dict[str, Any]):
-        """Handle Supabase sync event"""
+        """Handle Supabase sync event."""
         table = event_data.get("table", "")
         operation = event_data.get("operation", "")
         data = event_data.get("data", {})
@@ -394,7 +394,7 @@ class LSHDataPipeline:
         await self.processor.add_to_batch(processed_data)

     async def _handle_trading_data(self, event_data: Dict[str, Any]):
-        """Handle processed trading data"""
+        """Handle processed trading data."""
         records = event_data.get("records", [])

         self.logger.info(f"Received {len(records)} trading records for pipeline processing")
@@ -403,7 +403,7 @@ class LSHDataPipeline:
             await self.processor.add_to_batch(record)

     async def _process_job_output(self, job_id: str, output: str):
-        """Process job output data"""
+        """Process job output data."""
         try:
             # Parse output lines as JSON
             records = []
@@ -425,7 +425,7 @@ class LSHDataPipeline:
             self.logger.error(f"Error processing job output: {e}")

     async def start(self):
-        """Start the data pipeline"""
+        """Start the data pipeline."""
         if self._is_running:
             self.logger.warning("Pipeline already running")
             return
@@ -443,7 +443,7 @@ class LSHDataPipeline:
             raise

     async def stop(self):
-        """Stop the data pipeline"""
+        """Stop the data pipeline."""
         if not self._is_running:
             return

@@ -454,7 +454,7 @@ class LSHDataPipeline:
         await self.processor.flush_batch()

     async def get_stats(self) -> Dict[str, Any]:
-        """Get pipeline statistics"""
+        """Get pipeline statistics."""
         return {
             "is_running": self._is_running,
             "batch_buffer_size": len(self.processor.batch_buffer),
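The three `# noqa` markers added in this file map to flake8-simplify/ruff rules: SIM102 flags a nested `if` that could be collapsed into one condition with `and`, and SIM105 flags a `try`/`except`/`pass` that could be written with `contextlib.suppress`. The release silences the warnings rather than rewriting; for reference, a hypothetical SIM105-style rewrite of the `_transform_trading_table` conversion would look like:

    import contextlib

    # Equivalent to the try/except ValueError/pass above:
    with contextlib.suppress(ValueError):
        data["amount_float"] = float(data["amount"])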
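For orientation, the classes touched here compose into one service: `LSHDataPipeline` wires a `DataProcessor` (validation, enrichment, batching) to an `LSHClient` event stream. A minimal usage sketch based only on the signatures visible in this diff; the constructor defaults and whether `start()` returns after subscribing are assumptions:

    import asyncio

    from mcli.lib.services.data_pipeline import DataPipelineConfig, LSHDataPipeline
    from mcli.lib.services.lsh_client import LSHClient

    async def main():
        config = DataPipelineConfig()  # defaults include batch_size = 100
        async with LSHClient() as client:  # constructor arguments assumed optional
            pipeline = LSHDataPipeline(client, config)
            await pipeline.start()  # subscribes to lsh.* events (assumed non-blocking)
            print(await pipeline.get_stats())  # includes is_running, batch_buffer_size
            await pipeline.stop()  # stop() flushes any partially filled batch

    asyncio.run(main())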
mcli/lib/services/lsh_client.py
CHANGED
@@ -5,14 +5,12 @@ Provides integration with LSH daemon API server for data pipeline processing

 import asyncio
 import json
-import logging
 import os
 import time
 from typing import Any, Callable, Dict, List, Optional
 from urllib.parse import urljoin

 import aiohttp
-import aiomqtt
 from aiohttp_sse_client import client as sse_client

 from mcli.lib.logger.logger import get_logger
@@ -21,7 +19,7 @@ logger = get_logger(__name__)


 class LSHClient:
-    """Client for connecting to LSH daemon API server"""
+    """Client for connecting to LSH daemon API server."""

     def __init__(
         self,
@@ -39,30 +37,30 @@ class LSHClient:
             logger.warning("LSH_API_KEY not set - authentication may fail")

     async def __aenter__(self):
-        """Async context manager entry"""
+        """Async context manager entry."""
         await self.connect()
         return self

     async def __aexit__(self, exc_type, exc_val, exc_tb):
-        """Async context manager exit"""
+        """Async context manager exit."""
         await self.disconnect()

     async def connect(self):
-        """Initialize aiohttp session"""
+        """Initialize aiohttp session."""
         if not self.session:
             connector = aiohttp.TCPConnector(limit=10)
             self.session = aiohttp.ClientSession(connector=connector, timeout=self.timeout)
             logger.info(f"Connected to LSH API at {self.base_url}")

     async def disconnect(self):
-        """Close aiohttp session"""
+        """Close aiohttp session."""
         if self.session:
             await self.session.close()
             self.session = None
             logger.info("Disconnected from LSH API")

     def _get_headers(self) -> Dict[str, str]:
-        """Get HTTP headers with authentication"""
+        """Get HTTP headers with authentication."""
         headers = {"Content-Type": "application/json"}
         if self.api_key:
             headers["X-API-Key"] = self.api_key
@@ -71,7 +69,7 @@ class LSHClient:
     async def _request(
         self, method: str, endpoint: str, data: Optional[Dict] = None
     ) -> Dict[str, Any]:
-        """Make HTTP request to LSH API"""
+        """Make HTTP request to LSH API."""
         if not self.session:
             await self.connect()

@@ -92,11 +90,11 @@ class LSHClient:

     # Job Management
     async def get_status(self) -> Dict[str, Any]:
-        """Get LSH daemon status"""
+        """Get LSH daemon status."""
         return await self._request("GET", "/api/status")

     async def list_jobs(self, filter_params: Optional[Dict] = None) -> List[Dict]:
-        """List all jobs from LSH daemon"""
+        """List all jobs from LSH daemon."""
         endpoint = "/api/jobs"
         if filter_params:
             # Convert filter to query params
@@ -104,60 +102,60 @@ class LSHClient:
         return await self._request("GET", endpoint)

     async def get_job(self, job_id: str) -> Dict[str, Any]:
-        """Get specific job details"""
+        """Get specific job details."""
         return await self._request("GET", f"/api/jobs/{job_id}")

     async def create_job(self, job_spec: Dict[str, Any]) -> Dict[str, Any]:
-        """Create a new job in LSH daemon"""
+        """Create a new job in LSH daemon."""
         return await self._request("POST", "/api/jobs", job_spec)

     async def trigger_job(self, job_id: str) -> Dict[str, Any]:
-        """Trigger job execution"""
+        """Trigger job execution."""
         return await self._request("POST", f"/api/jobs/{job_id}/trigger")

     async def start_job(self, job_id: str) -> Dict[str, Any]:
-        """Start a job"""
+        """Start a job."""
         return await self._request("POST", f"/api/jobs/{job_id}/start")

     async def stop_job(self, job_id: str, signal: str = "SIGTERM") -> Dict[str, Any]:
-        """Stop a job"""
+        """Stop a job."""
         return await self._request("POST", f"/api/jobs/{job_id}/stop", {"signal": signal})

     async def remove_job(self, job_id: str, force: bool = False) -> None:
-        """Remove a job"""
+        """Remove a job."""
         params = {"force": str(force).lower()}
         endpoint = f"/api/jobs/{job_id}?" + "&".join(f"{k}={v}" for k, v in params.items())
         await self._request("DELETE", endpoint)

     async def bulk_create_jobs(self, jobs: List[Dict[str, Any]]) -> Dict[str, Any]:
-        """Create multiple jobs"""
+        """Create multiple jobs."""
         return await self._request("POST", "/api/jobs/bulk", {"jobs": jobs})

     # Data Export
     async def export_jobs(self, format: str = "json") -> str:
-        """Export job data"""
+        """Export job data."""
         endpoint = f"/api/export/jobs?format={format}"
         return await self._request("GET", endpoint)

     # Webhook Management
     async def list_webhooks(self) -> Dict[str, Any]:
-        """List configured webhooks"""
+        """List configured webhooks."""
         return await self._request("GET", "/api/webhooks")

     async def add_webhook(self, endpoint_url: str) -> Dict[str, Any]:
-        """Add webhook endpoint"""
+        """Add webhook endpoint."""
         return await self._request("POST", "/api/webhooks", {"endpoint": endpoint_url})

     # Event Handling
     def on(self, event_type: str, handler: Callable):
-        """Register event handler"""
+        """Register event handler."""
         if event_type not in self._event_handlers:
             self._event_handlers[event_type] = []
         self._event_handlers[event_type].append(handler)
         logger.info(f"Registered handler for event: {event_type}")

     async def _emit_event(self, event_type: str, data: Any):
-        """Emit event to registered handlers"""
+        """Emit event to registered handlers."""
         if event_type in self._event_handlers:
             for handler in self._event_handlers[event_type]:
                 try:
@@ -169,7 +167,7 @@ class LSHClient:
                     logger.error(f"Error in event handler for {event_type}: {e}")

     async def stream_events(self):
-        """Stream events from LSH API using Server-Sent Events"""
+        """Stream events from LSH API using Server-Sent Events."""
         if not self.session:
             await self.connect()

@@ -207,13 +205,13 @@ class LSHClient:
     async def trigger_supabase_sync(
         self, table: str, operation: str, data: Dict[str, Any]
     ) -> Dict[str, Any]:
-        """Trigger Supabase data sync notification"""
+        """Trigger Supabase data sync notification."""
         payload = {"table": table, "operation": operation, "data": data}
         return await self._request("POST", "/api/supabase/sync", payload)

     # Health Check
     async def health_check(self) -> bool:
-        """Check if LSH API is healthy"""
+        """Check if LSH API is healthy."""
         try:
             if not self.session:
                 await self.connect()
@@ -227,7 +225,7 @@ class LSHClient:


 class LSHEventProcessor:
-    """Process events from LSH daemon for data pipeline integration"""
+    """Process events from LSH daemon for data pipeline integration."""

     def __init__(self, lsh_client: LSHClient):
         self.client = lsh_client
@@ -235,7 +233,7 @@ class LSHEventProcessor:
         self._setup_event_handlers()

     def _setup_event_handlers(self):
-        """Setup default event handlers"""
+        """Setup default event handlers."""
         self.client.on("job:completed", self._handle_job_completed)
         self.client.on("job:failed", self._handle_job_failed)
         self.client.on("job:started", self._handle_job_started)
@@ -243,11 +241,11 @@ class LSHEventProcessor:
         self.client.on("connected", self._handle_connected)

     async def _handle_connected(self, data: Dict[str, Any]):
-        """Handle connection established event"""
+        """Handle connection established event."""
         self.logger.info("Connected to LSH event stream")

     async def _handle_job_started(self, data: Dict[str, Any]):
-        """Handle job started event"""
+        """Handle job started event."""
         job_data = data.get("data", {})
         job_id = job_data.get("id", "unknown")
         job_name = job_data.get("name", "unknown")
@@ -266,7 +264,7 @@ class LSHEventProcessor:
         )

     async def _handle_job_completed(self, data: Dict[str, Any]):
-        """Handle job completion event"""
+        """Handle job completion event."""
         job_data = data.get("data", {})
         job_id = job_data.get("id", "unknown")
         job_name = job_data.get("name", "unknown")
@@ -299,7 +297,7 @@ class LSHEventProcessor:
         )

     async def _handle_job_failed(self, data: Dict[str, Any]):
-        """Handle job failure event"""
+        """Handle job failure event."""
         job_data = data.get("data", {})
         job_id = job_data.get("id", "unknown")
         job_name = job_data.get("name", "unknown")
@@ -320,7 +318,7 @@ class LSHEventProcessor:
         )

     async def _handle_supabase_sync(self, data: Dict[str, Any]):
-        """Handle Supabase data sync event"""
+        """Handle Supabase data sync event."""
         table = data.get("table", "unknown")
         operation = data.get("operation", "unknown")
         sync_data = data.get("data", {})
@@ -343,7 +341,7 @@ class LSHEventProcessor:
         )

     async def _process_trading_data(self, job_data: Dict, stdout: str):
-        """Process politician trading data from job output"""
+        """Process politician trading data from job output."""
         try:
             # Parse trading data from stdout
             if stdout.strip():
@@ -374,7 +372,7 @@ class LSHEventProcessor:
             self.logger.error(f"Error processing trading data: {e}")

     async def _process_supabase_job(self, job_data: Dict):
-        """Process Supabase synchronization job"""
+        """Process Supabase synchronization job."""
         try:
             # Check for database sync metadata
             sync_info = job_data.get("databaseSync", {})
@@ -391,7 +389,7 @@ class LSHEventProcessor:
             self.logger.error(f"Error processing Supabase job: {e}")

     async def _process_politician_data(self, table: str, operation: str, data: Dict):
-        """Process politician-related data changes"""
+        """Process politician-related data changes."""
         try:
             self.logger.info(f"Processing politician data: {operation} on {table}")

@@ -414,7 +412,7 @@ class LSHEventProcessor:
             self.logger.error(f"Error processing politician data: {e}")

     async def _transform_politician_data(self, table: str, operation: str, data: Dict) -> Dict:
-        """Transform politician data based on business rules"""
+        """Transform politician data based on business rules."""
         # Apply transformations here
         transformed = data.copy()

@@ -431,7 +429,7 @@ class LSHEventProcessor:
         return transformed

     def _categorize_amount(self, amount: float) -> str:
-        """Categorize transaction amounts"""
+        """Categorize transaction amounts."""
         if amount < 1000:
             return "small"
         elif amount < 50000:
@@ -442,11 +440,11 @@ class LSHEventProcessor:
         return "very_large"

     async def _emit_mcli_event(self, event_type: str, data: Dict[str, Any]):
-        """Emit mcli-specific events (can be extended to use message queue)"""
+        """Emit mcli-specific events (can be extended to use message queue)."""
         self.logger.debug(f"Emitting mcli event: {event_type}")
         # For now, just log - can be extended to use Redis, RabbitMQ, etc.

     async def start_processing(self):
-        """Start processing LSH events"""
+        """Start processing LSH events."""
         self.logger.info("Starting LSH event processing...")
         await self.client.stream_events()
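The client above is a thin async wrapper over the LSH daemon's REST API; the hunks also prune the `logging` and `aiomqtt` imports (presumably unused, since the module already uses mcli's own logger). A usage sketch assuming default constructor arguments (the `__init__` parameter list is truncated in this diff, and the API key is read from `LSH_API_KEY` per the warning shown above):

    import asyncio

    from mcli.lib.services.lsh_client import LSHClient

    async def main():
        # __aenter__/__aexit__ call connect()/disconnect() for us.
        async with LSHClient() as client:
            if await client.health_check():
                status = await client.get_status()  # GET /api/status
                jobs = await client.list_jobs()     # GET /api/jobs
                print(status, len(jobs))

    asyncio.run(main())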
mcli/lib/shell/shell.py
CHANGED
@@ -5,7 +5,7 @@ import shutil
 import subprocess
 import sys
 from pathlib import Path
-from typing import Any, Dict
+from typing import Any, Dict

 from mcli.lib.logger.logger import get_logger, register_subprocess

@@ -13,7 +13,7 @@ logger = get_logger(__name__)


 def shell_exec(script_path: str, function_name: str, *args) -> Dict[str, Any]:
-    """Execute a shell script function with security checks and better error handling"""
+    """Execute a shell script function with security checks and better error handling."""
     # Validate script path
     script_path = Path(script_path).resolve()
     if not script_path.exists():
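Two notes on this file: the `from typing import Any, Dict` line shows no visible difference, which usually indicates a whitespace-only edit that this rendering cannot display; and the docstring change is the same release-wide normalization seen throughout the diff, matching the pydocstyle D400/D415 convention (an assumption) that a docstring's first line ends with a period:

    def shell_exec(script_path: str, function_name: str, *args):
        """Execute a shell script function with security checks and better error handling."""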
mcli/lib/toml/__init__.py
CHANGED
@@ -1 +0,0 @@
-from .toml import read_from_toml