mcli-framework 7.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of mcli-framework might be problematic. Click here for more details.
- mcli/app/chat_cmd.py +42 -0
- mcli/app/commands_cmd.py +226 -0
- mcli/app/completion_cmd.py +216 -0
- mcli/app/completion_helpers.py +288 -0
- mcli/app/cron_test_cmd.py +697 -0
- mcli/app/logs_cmd.py +419 -0
- mcli/app/main.py +492 -0
- mcli/app/model/model.py +1060 -0
- mcli/app/model_cmd.py +227 -0
- mcli/app/redis_cmd.py +269 -0
- mcli/app/video/video.py +1114 -0
- mcli/app/visual_cmd.py +303 -0
- mcli/chat/chat.py +2409 -0
- mcli/chat/command_rag.py +514 -0
- mcli/chat/enhanced_chat.py +652 -0
- mcli/chat/system_controller.py +1010 -0
- mcli/chat/system_integration.py +1016 -0
- mcli/cli.py +25 -0
- mcli/config.toml +20 -0
- mcli/lib/api/api.py +586 -0
- mcli/lib/api/daemon_client.py +203 -0
- mcli/lib/api/daemon_client_local.py +44 -0
- mcli/lib/api/daemon_decorator.py +217 -0
- mcli/lib/api/mcli_decorators.py +1032 -0
- mcli/lib/auth/auth.py +85 -0
- mcli/lib/auth/aws_manager.py +85 -0
- mcli/lib/auth/azure_manager.py +91 -0
- mcli/lib/auth/credential_manager.py +192 -0
- mcli/lib/auth/gcp_manager.py +93 -0
- mcli/lib/auth/key_manager.py +117 -0
- mcli/lib/auth/mcli_manager.py +93 -0
- mcli/lib/auth/token_manager.py +75 -0
- mcli/lib/auth/token_util.py +1011 -0
- mcli/lib/config/config.py +47 -0
- mcli/lib/discovery/__init__.py +1 -0
- mcli/lib/discovery/command_discovery.py +274 -0
- mcli/lib/erd/erd.py +1345 -0
- mcli/lib/erd/generate_graph.py +453 -0
- mcli/lib/files/files.py +76 -0
- mcli/lib/fs/fs.py +109 -0
- mcli/lib/lib.py +29 -0
- mcli/lib/logger/logger.py +611 -0
- mcli/lib/performance/optimizer.py +409 -0
- mcli/lib/performance/rust_bridge.py +502 -0
- mcli/lib/performance/uvloop_config.py +154 -0
- mcli/lib/pickles/pickles.py +50 -0
- mcli/lib/search/cached_vectorizer.py +479 -0
- mcli/lib/services/data_pipeline.py +460 -0
- mcli/lib/services/lsh_client.py +441 -0
- mcli/lib/services/redis_service.py +387 -0
- mcli/lib/shell/shell.py +137 -0
- mcli/lib/toml/toml.py +33 -0
- mcli/lib/ui/styling.py +47 -0
- mcli/lib/ui/visual_effects.py +634 -0
- mcli/lib/watcher/watcher.py +185 -0
- mcli/ml/api/app.py +215 -0
- mcli/ml/api/middleware.py +224 -0
- mcli/ml/api/routers/admin_router.py +12 -0
- mcli/ml/api/routers/auth_router.py +244 -0
- mcli/ml/api/routers/backtest_router.py +12 -0
- mcli/ml/api/routers/data_router.py +12 -0
- mcli/ml/api/routers/model_router.py +302 -0
- mcli/ml/api/routers/monitoring_router.py +12 -0
- mcli/ml/api/routers/portfolio_router.py +12 -0
- mcli/ml/api/routers/prediction_router.py +267 -0
- mcli/ml/api/routers/trade_router.py +12 -0
- mcli/ml/api/routers/websocket_router.py +76 -0
- mcli/ml/api/schemas.py +64 -0
- mcli/ml/auth/auth_manager.py +425 -0
- mcli/ml/auth/models.py +154 -0
- mcli/ml/auth/permissions.py +302 -0
- mcli/ml/backtesting/backtest_engine.py +502 -0
- mcli/ml/backtesting/performance_metrics.py +393 -0
- mcli/ml/cache.py +400 -0
- mcli/ml/cli/main.py +398 -0
- mcli/ml/config/settings.py +394 -0
- mcli/ml/configs/dvc_config.py +230 -0
- mcli/ml/configs/mlflow_config.py +131 -0
- mcli/ml/configs/mlops_manager.py +293 -0
- mcli/ml/dashboard/app.py +532 -0
- mcli/ml/dashboard/app_integrated.py +738 -0
- mcli/ml/dashboard/app_supabase.py +560 -0
- mcli/ml/dashboard/app_training.py +615 -0
- mcli/ml/dashboard/cli.py +51 -0
- mcli/ml/data_ingestion/api_connectors.py +501 -0
- mcli/ml/data_ingestion/data_pipeline.py +567 -0
- mcli/ml/data_ingestion/stream_processor.py +512 -0
- mcli/ml/database/migrations/env.py +94 -0
- mcli/ml/database/models.py +667 -0
- mcli/ml/database/session.py +200 -0
- mcli/ml/experimentation/ab_testing.py +845 -0
- mcli/ml/features/ensemble_features.py +607 -0
- mcli/ml/features/political_features.py +676 -0
- mcli/ml/features/recommendation_engine.py +809 -0
- mcli/ml/features/stock_features.py +573 -0
- mcli/ml/features/test_feature_engineering.py +346 -0
- mcli/ml/logging.py +85 -0
- mcli/ml/mlops/data_versioning.py +518 -0
- mcli/ml/mlops/experiment_tracker.py +377 -0
- mcli/ml/mlops/model_serving.py +481 -0
- mcli/ml/mlops/pipeline_orchestrator.py +614 -0
- mcli/ml/models/base_models.py +324 -0
- mcli/ml/models/ensemble_models.py +675 -0
- mcli/ml/models/recommendation_models.py +474 -0
- mcli/ml/models/test_models.py +487 -0
- mcli/ml/monitoring/drift_detection.py +676 -0
- mcli/ml/monitoring/metrics.py +45 -0
- mcli/ml/optimization/portfolio_optimizer.py +834 -0
- mcli/ml/preprocessing/data_cleaners.py +451 -0
- mcli/ml/preprocessing/feature_extractors.py +491 -0
- mcli/ml/preprocessing/ml_pipeline.py +382 -0
- mcli/ml/preprocessing/politician_trading_preprocessor.py +569 -0
- mcli/ml/preprocessing/test_preprocessing.py +294 -0
- mcli/ml/scripts/populate_sample_data.py +200 -0
- mcli/ml/tasks.py +400 -0
- mcli/ml/tests/test_integration.py +429 -0
- mcli/ml/tests/test_training_dashboard.py +387 -0
- mcli/public/oi/oi.py +15 -0
- mcli/public/public.py +4 -0
- mcli/self/self_cmd.py +1246 -0
- mcli/workflow/daemon/api_daemon.py +800 -0
- mcli/workflow/daemon/async_command_database.py +681 -0
- mcli/workflow/daemon/async_process_manager.py +591 -0
- mcli/workflow/daemon/client.py +530 -0
- mcli/workflow/daemon/commands.py +1196 -0
- mcli/workflow/daemon/daemon.py +905 -0
- mcli/workflow/daemon/daemon_api.py +59 -0
- mcli/workflow/daemon/enhanced_daemon.py +571 -0
- mcli/workflow/daemon/process_cli.py +244 -0
- mcli/workflow/daemon/process_manager.py +439 -0
- mcli/workflow/daemon/test_daemon.py +275 -0
- mcli/workflow/dashboard/dashboard_cmd.py +113 -0
- mcli/workflow/docker/docker.py +0 -0
- mcli/workflow/file/file.py +100 -0
- mcli/workflow/gcloud/config.toml +21 -0
- mcli/workflow/gcloud/gcloud.py +58 -0
- mcli/workflow/git_commit/ai_service.py +328 -0
- mcli/workflow/git_commit/commands.py +430 -0
- mcli/workflow/lsh_integration.py +355 -0
- mcli/workflow/model_service/client.py +594 -0
- mcli/workflow/model_service/download_and_run_efficient_models.py +288 -0
- mcli/workflow/model_service/lightweight_embedder.py +397 -0
- mcli/workflow/model_service/lightweight_model_server.py +714 -0
- mcli/workflow/model_service/lightweight_test.py +241 -0
- mcli/workflow/model_service/model_service.py +1955 -0
- mcli/workflow/model_service/ollama_efficient_runner.py +425 -0
- mcli/workflow/model_service/pdf_processor.py +386 -0
- mcli/workflow/model_service/test_efficient_runner.py +234 -0
- mcli/workflow/model_service/test_example.py +315 -0
- mcli/workflow/model_service/test_integration.py +131 -0
- mcli/workflow/model_service/test_new_features.py +149 -0
- mcli/workflow/openai/openai.py +99 -0
- mcli/workflow/politician_trading/commands.py +1790 -0
- mcli/workflow/politician_trading/config.py +134 -0
- mcli/workflow/politician_trading/connectivity.py +490 -0
- mcli/workflow/politician_trading/data_sources.py +395 -0
- mcli/workflow/politician_trading/database.py +410 -0
- mcli/workflow/politician_trading/demo.py +248 -0
- mcli/workflow/politician_trading/models.py +165 -0
- mcli/workflow/politician_trading/monitoring.py +413 -0
- mcli/workflow/politician_trading/scrapers.py +966 -0
- mcli/workflow/politician_trading/scrapers_california.py +412 -0
- mcli/workflow/politician_trading/scrapers_eu.py +377 -0
- mcli/workflow/politician_trading/scrapers_uk.py +350 -0
- mcli/workflow/politician_trading/scrapers_us_states.py +438 -0
- mcli/workflow/politician_trading/supabase_functions.py +354 -0
- mcli/workflow/politician_trading/workflow.py +852 -0
- mcli/workflow/registry/registry.py +180 -0
- mcli/workflow/repo/repo.py +223 -0
- mcli/workflow/scheduler/commands.py +493 -0
- mcli/workflow/scheduler/cron_parser.py +238 -0
- mcli/workflow/scheduler/job.py +182 -0
- mcli/workflow/scheduler/monitor.py +139 -0
- mcli/workflow/scheduler/persistence.py +324 -0
- mcli/workflow/scheduler/scheduler.py +679 -0
- mcli/workflow/sync/sync_cmd.py +437 -0
- mcli/workflow/sync/test_cmd.py +314 -0
- mcli/workflow/videos/videos.py +242 -0
- mcli/workflow/wakatime/wakatime.py +11 -0
- mcli/workflow/workflow.py +37 -0
- mcli_framework-7.0.0.dist-info/METADATA +479 -0
- mcli_framework-7.0.0.dist-info/RECORD +186 -0
- mcli_framework-7.0.0.dist-info/WHEEL +5 -0
- mcli_framework-7.0.0.dist-info/entry_points.txt +7 -0
- mcli_framework-7.0.0.dist-info/licenses/LICENSE +21 -0
- mcli_framework-7.0.0.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,681 @@
|
|
|
1
|
+
import asyncio
|
|
2
|
+
import json
|
|
3
|
+
import sqlite3
|
|
4
|
+
import uuid
|
|
5
|
+
from contextlib import asynccontextmanager
|
|
6
|
+
from dataclasses import asdict, dataclass
|
|
7
|
+
from datetime import datetime
|
|
8
|
+
from pathlib import Path
|
|
9
|
+
from typing import Any, Dict, List, Optional
|
|
10
|
+
|
|
11
|
+
import aiosqlite
|
|
12
|
+
import redis.asyncio as redis
|
|
13
|
+
|
|
14
|
+
from mcli.lib.logger.logger import get_logger
|
|
15
|
+
|
|
16
|
+
logger = get_logger(__name__)
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
@dataclass
class Command:
    """A stored, executable command plus its bookkeeping metadata.

    Optional collection and timestamp fields default to ``None`` and are
    normalized in ``__post_init__`` so every instance owns fresh list
    objects and concrete creation/update timestamps.
    """

    id: str
    name: str
    description: str
    code: str
    language: str  # 'python', 'node', 'lua', 'shell', 'rust'
    group: Optional[str] = None
    tags: Optional[List[str]] = None
    created_at: Optional[datetime] = None
    updated_at: Optional[datetime] = None
    execution_count: int = 0
    last_executed: Optional[datetime] = None
    is_active: bool = True
    version: str = "1.0"
    author: Optional[str] = None
    dependencies: Optional[List[str]] = None

    def __post_init__(self):
        # Stamp missing timestamps and replace None collections with
        # per-instance lists (avoids shared mutable defaults).
        if self.created_at is None:
            self.created_at = datetime.now()
        if self.updated_at is None:
            self.updated_at = datetime.now()
        self.tags = [] if self.tags is None else self.tags
        self.dependencies = [] if self.dependencies is None else self.dependencies
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
@dataclass
class ExecutionRecord:
    """Represents a command execution record.

    One instance corresponds to one row in the ``executions`` table and
    captures the outcome of a single run of a stored :class:`Command`.
    """

    id: str  # unique id for this execution row
    command_id: str  # id of the Command that was executed
    executed_at: datetime  # when the execution happened
    status: str  # 'success', 'failed', 'timeout'
    output: Optional[str] = None  # captured output, if any
    error: Optional[str] = None  # captured error text, if any
    execution_time_ms: Optional[int] = None  # wall-clock duration in milliseconds
    user: Optional[str] = None  # user that triggered the run
    context: Optional[Dict[str, Any]] = None  # extra JSON-serializable context
|
|
63
|
+
|
|
64
|
+
|
|
65
|
+
class AsyncCommandDatabase:
|
|
66
|
+
"""High-performance async command database with connection pooling and caching"""
|
|
67
|
+
|
|
68
|
+
def __init__(
    self, db_path: Optional[str] = None, redis_url: Optional[str] = None, pool_size: int = 10
):
    """Configure paths, cache settings and pool bookkeeping.

    No connections are opened here; real I/O happens in :meth:`initialize`.

    Args:
        db_path: SQLite file location. Defaults to
            ``~/.local/mcli/daemon/commands.db``; parent dirs are created.
        redis_url: Redis URL for the cache. Defaults to ``redis://localhost:6379``.
        pool_size: maximum number of idle pooled SQLite connections.
    """
    if db_path is None:
        db_path = Path.home() / ".local" / "mcli" / "daemon" / "commands.db"

    self.db_path = Path(db_path)
    # Ensure the directory exists so aiosqlite.connect() can create the file.
    self.db_path.parent.mkdir(parents=True, exist_ok=True)

    self.redis_url = redis_url or "redis://localhost:6379"
    self.redis_client: Optional[redis.Redis] = None

    # Connection pool
    self.pool_size = pool_size
    self._connection_pool: List[aiosqlite.Connection] = []
    self._pool_lock = asyncio.Lock()
    self._initialized = False

    # Cache settings
    self.cache_ttl = 3600  # 1 hour
    self.enable_caching = True
|
|
89
|
+
|
|
90
|
+
async def initialize(self):
    """Prepare schema, Redis cache and the SQLite connection pool.

    Idempotent: calls after the first successful run return immediately.
    """
    if self._initialized:
        return

    # Order matters: schema first, then the cache, then pooled connections.
    for setup_step in (self._init_database, self._init_redis, self._init_connection_pool):
        await setup_step()

    self._initialized = True
    logger.info("AsyncCommandDatabase initialized successfully")
|
|
101
|
+
|
|
102
|
+
async def _init_database(self):
    """Create the schema: tables, indexes, FTS mirror and sync triggers.

    Uses a throwaway connection (pooled connections are opened later).
    Every statement is guarded with IF NOT EXISTS, so re-running is safe.
    """
    async with aiosqlite.connect(self.db_path) as db:
        # Enable performance optimizations
        await db.execute("PRAGMA journal_mode=WAL")  # concurrent readers with one writer
        await db.execute("PRAGMA synchronous=NORMAL")  # fewer fsyncs; safe with WAL
        await db.execute("PRAGMA cache_size=10000")  # page-cache size
        await db.execute("PRAGMA temp_store=memory")  # temp tables/indices in RAM
        await db.execute("PRAGMA mmap_size=268435456")  # 256MB

        # Create tables.
        # commands: one row per stored command. tags/dependencies hold JSON
        # text; the dataclass field "group" is stored as column group_name.
        await db.execute(
            """
            CREATE TABLE IF NOT EXISTS commands (
                id TEXT PRIMARY KEY,
                name TEXT NOT NULL,
                description TEXT,
                code TEXT NOT NULL,
                language TEXT NOT NULL,
                group_name TEXT,
                tags TEXT,
                created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                execution_count INTEGER DEFAULT 0,
                last_executed TIMESTAMP,
                is_active BOOLEAN DEFAULT 1,
                version TEXT DEFAULT '1.0',
                author TEXT,
                dependencies TEXT
            )
            """
        )

        # groups: optional hierarchy of command groups (self-referencing FK).
        await db.execute(
            """
            CREATE TABLE IF NOT EXISTS groups (
                id TEXT PRIMARY KEY,
                name TEXT NOT NULL,
                description TEXT,
                parent_group_id TEXT,
                created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                metadata TEXT,
                FOREIGN KEY (parent_group_id) REFERENCES groups (id)
            )
            """
        )

        # executions: one row per command run (see ExecutionRecord).
        await db.execute(
            """
            CREATE TABLE IF NOT EXISTS executions (
                id TEXT PRIMARY KEY,
                command_id TEXT NOT NULL,
                executed_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                status TEXT NOT NULL,
                output TEXT,
                error TEXT,
                execution_time_ms INTEGER,
                user TEXT,
                context TEXT,
                FOREIGN KEY (command_id) REFERENCES commands (id)
            )
            """
        )

        # Create performance indexes
        await db.execute("CREATE INDEX IF NOT EXISTS idx_commands_name ON commands(name)")
        await db.execute(
            "CREATE INDEX IF NOT EXISTS idx_commands_language ON commands(language)"
        )
        await db.execute(
            "CREATE INDEX IF NOT EXISTS idx_commands_group ON commands(group_name)"
        )
        await db.execute(
            "CREATE INDEX IF NOT EXISTS idx_commands_active ON commands(is_active)"
        )
        await db.execute(
            "CREATE INDEX IF NOT EXISTS idx_commands_execution_count ON commands(execution_count)"
        )
        await db.execute(
            "CREATE INDEX IF NOT EXISTS idx_executions_command_id ON executions(command_id)"
        )
        await db.execute(
            "CREATE INDEX IF NOT EXISTS idx_executions_status ON executions(status)"
        )
        await db.execute(
            "CREATE INDEX IF NOT EXISTS idx_executions_executed_at ON executions(executed_at)"
        )

        # Create full-text search for commands (external-content FTS5 table
        # mirroring name/description/tags from commands).
        await db.execute(
            """
            CREATE VIRTUAL TABLE IF NOT EXISTS commands_fts USING fts5(
                id UNINDEXED,
                name,
                description,
                tags,
                content='commands',
                content_rowid='rowid'
            )
            """
        )

        # Create FTS triggers.
        # NOTE(review): commands_fts is declared with external content
        # (content='commands'), but these triggers write to the FTS table
        # directly; FTS5 external-content tables normally require the
        # special "INSERT INTO fts(fts, ...) VALUES('delete', ...)"
        # convention for updates/deletes — verify the index stays in sync.
        await db.execute(
            """
            CREATE TRIGGER IF NOT EXISTS commands_fts_insert AFTER INSERT ON commands BEGIN
                INSERT INTO commands_fts(id, name, description, tags)
                VALUES (new.id, new.name, new.description, new.tags);
            END
            """
        )

        await db.execute(
            """
            CREATE TRIGGER IF NOT EXISTS commands_fts_update AFTER UPDATE ON commands BEGIN
                UPDATE commands_fts SET name=new.name, description=new.description, tags=new.tags
                WHERE id=new.id;
            END
            """
        )

        await db.execute(
            """
            CREATE TRIGGER IF NOT EXISTS commands_fts_delete AFTER DELETE ON commands BEGIN
                DELETE FROM commands_fts WHERE id=old.id;
            END
            """
        )

        await db.commit()
|
|
232
|
+
|
|
233
|
+
async def _init_redis(self):
    """Connect to Redis for caching, degrading gracefully if unreachable."""
    if not self.enable_caching:
        return

    try:
        client = redis.from_url(self.redis_url, decode_responses=True)
        # ping() verifies the server is actually reachable before we rely on it.
        await client.ping()
    except Exception as e:
        # Caching is best-effort: disable it rather than failing startup.
        logger.warning(f"Failed to connect to Redis: {e}. Caching disabled.")
        self.redis_client = None
        self.enable_caching = False
    else:
        self.redis_client = client
        logger.info("Connected to Redis for command caching")
|
|
246
|
+
|
|
247
|
+
async def _init_connection_pool(self):
    """Pre-open ``pool_size`` SQLite connections with the standard pragmas."""
    async with self._pool_lock:
        for _ in range(self.pool_size):
            connection = await aiosqlite.connect(self.db_path)
            # Match the pragmas applied by the schema initializer so every
            # pooled connection behaves identically.
            for pragma in ("PRAGMA journal_mode=WAL", "PRAGMA synchronous=NORMAL"):
                await connection.execute(pragma)
            self._connection_pool.append(connection)
|
|
255
|
+
|
|
256
|
+
@asynccontextmanager
async def _get_connection(self):
    """Get a database connection from the pool.

    Pops an idle pooled connection, or opens an overflow connection when
    the pool is empty (load above pool_size). On exit the connection is
    returned to the pool, or closed if the pool is already at capacity.
    """
    async with self._pool_lock:
        if self._connection_pool:
            conn = self._connection_pool.pop()
        else:
            # Pool exhausted: open an on-demand connection with the same
            # journal mode as pooled ones.
            conn = await aiosqlite.connect(self.db_path)
            await conn.execute("PRAGMA journal_mode=WAL")

    try:
        yield conn
    finally:
        # Hand the connection back, keeping at most pool_size idle handles.
        async with self._pool_lock:
            if len(self._connection_pool) < self.pool_size:
                self._connection_pool.append(conn)
            else:
                await conn.close()
|
|
274
|
+
|
|
275
|
+
async def add_command(self, command: Command) -> str:
    """Insert a new command row and return its (possibly generated) id.

    Assigns a uuid4 id when the command has none, refreshes updated_at,
    stores tags/dependencies as JSON text, and pushes the new command into
    the Redis cache. Rolls back and re-raises on any database error.
    """
    if not command.id:
        command.id = str(uuid.uuid4())

    command.updated_at = datetime.now()

    async with self._get_connection() as db:
        try:
            await db.execute(
                """
                INSERT INTO commands
                (id, name, description, code, language, group_name, tags,
                 created_at, updated_at, execution_count, last_executed, is_active,
                 version, author, dependencies)
                VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
                """,
                (
                    command.id,
                    command.name,
                    command.description,
                    command.code,
                    command.language,
                    command.group,  # dataclass field "group" -> column group_name
                    json.dumps(command.tags),
                    command.created_at.isoformat(),
                    command.updated_at.isoformat(),
                    command.execution_count,
                    command.last_executed.isoformat() if command.last_executed else None,
                    command.is_active,
                    command.version,
                    command.author,
                    json.dumps(command.dependencies),
                ),
            )

            await db.commit()

            # Cache the command (best-effort; failures are logged inside).
            if self.enable_caching and self.redis_client:
                await self._cache_command(command)

            logger.info(f"Added command: {command.name} ({command.id})")
            return command.id

        except Exception as e:
            logger.error(f"Error adding command: {e}")
            await db.rollback()
            raise
|
|
324
|
+
|
|
325
|
+
async def get_command(self, command_id: str) -> Optional[Command]:
    """Fetch one active command by id, consulting the Redis cache first."""
    use_cache = self.enable_caching and self.redis_client

    # Fast path: serve straight from Redis when possible.
    if use_cache:
        hit = await self._get_cached_command(command_id)
        if hit:
            return hit

    # Slow path: read SQLite and re-populate the cache on success.
    async with self._get_connection() as db:
        async with db.execute(
            "SELECT * FROM commands WHERE id = ? AND is_active = 1", (command_id,)
        ) as cursor:
            row = await cursor.fetchone()
            if row is None:
                return None

            command = self._row_to_command(row)
            if use_cache:
                await self._cache_command(command)
            return command
|
|
348
|
+
|
|
349
|
+
async def update_command(self, command: Command) -> bool:
    """Persist all mutable fields of an existing command.

    Refreshes updated_at, rewrites every column from the dataclass, and
    refreshes the Redis cache entry on success. Returns True when a row
    matched the id, False otherwise; rolls back and re-raises on error.
    """
    command.updated_at = datetime.now()

    async with self._get_connection() as db:
        try:
            result = await db.execute(
                """
                UPDATE commands SET
                    name=?, description=?, code=?, language=?, group_name=?, tags=?,
                    updated_at=?, execution_count=?, last_executed=?, is_active=?,
                    version=?, author=?, dependencies=?
                WHERE id=?
                """,
                (
                    command.name,
                    command.description,
                    command.code,
                    command.language,
                    command.group,
                    json.dumps(command.tags),
                    command.updated_at.isoformat(),
                    command.execution_count,
                    command.last_executed.isoformat() if command.last_executed else None,
                    command.is_active,
                    command.version,
                    command.author,
                    json.dumps(command.dependencies),
                    command.id,
                ),
            )

            await db.commit()

            if result.rowcount > 0:
                # Update cache so readers see the new state immediately.
                if self.enable_caching and self.redis_client:
                    await self._cache_command(command)

                logger.info(f"Updated command: {command.name} ({command.id})")
                return True

            return False

        except Exception as e:
            logger.error(f"Error updating command: {e}")
            await db.rollback()
            raise
|
|
397
|
+
|
|
398
|
+
async def delete_command(self, command_id: str) -> bool:
    """Soft-delete a command (mark inactive); True if a row was updated."""
    async with self._get_connection() as db:
        try:
            cursor = await db.execute(
                "UPDATE commands SET is_active = 0, updated_at = ? WHERE id = ?",
                (datetime.now().isoformat(), command_id),
            )
            await db.commit()

            if cursor.rowcount <= 0:
                return False

            # Evict any stale cached copy so readers don't see the
            # deleted command.
            if self.enable_caching and self.redis_client:
                await self.redis_client.delete(f"command:{command_id}")

            logger.info(f"Deleted command: {command_id}")
            return True

        except Exception as e:
            logger.error(f"Error deleting command: {e}")
            await db.rollback()
            raise
|
|
423
|
+
|
|
424
|
+
async def search_commands(self, query: str, limit: int = 50) -> List[Command]:
    """Full-text search for commands.

    Each whitespace-separated term is quoted and suffixed with ``*`` so
    every term must match as a prefix. A blank query falls back to a
    plain listing via :meth:`get_all_commands`.
    """
    if not query.strip():
        return await self.get_all_commands(limit=limit)

    # Use FTS for efficient search
    async with self._get_connection() as db:
        # Prepare FTS query: '"word"*' quotes the term (neutralizing FTS
        # operators in user input) and enables prefix matching.
        fts_query = " ".join(f'"{word}"*' for word in query.split() if word.strip())

        # NOTE(review): the bare "rank" relies on FTS5's auxiliary rank
        # column being resolvable through the join — verify it refers to
        # fts.rank if ordering misbehaves.
        async with db.execute(
            """
            SELECT c.* FROM commands c
            JOIN commands_fts fts ON c.id = fts.id
            WHERE commands_fts MATCH ? AND c.is_active = 1
            ORDER BY rank, c.execution_count DESC, c.updated_at DESC
            LIMIT ?
            """,
            (fts_query, limit),
        ) as cursor:
            commands = []
            async for row in cursor:
                commands.append(self._row_to_command(row))
            return commands
|
|
448
|
+
|
|
449
|
+
async def get_all_commands(
    self,
    group: Optional[str] = None,
    language: Optional[str] = None,
    limit: int = 100,
    offset: int = 0,
) -> List[Command]:
    """List active commands with optional filtering and pagination.

    Args:
        group: if given, only commands whose group_name matches.
        language: if given, only commands in this language.
        limit: maximum rows returned.
        offset: pagination offset.

    Returns commands ordered by popularity, then recency.
    """
    where_clauses = ["is_active = 1"]
    params = []

    if group:
        where_clauses.append("group_name = ?")
        params.append(group)

    if language:
        where_clauses.append("language = ?")
        params.append(language)

    params.extend([limit, offset])

    # Only fixed clause strings are interpolated into the SQL here; all
    # user-supplied values are bound as parameters.
    query = f"""
        SELECT * FROM commands
        WHERE {" AND ".join(where_clauses)}
        ORDER BY execution_count DESC, updated_at DESC
        LIMIT ? OFFSET ?
    """

    async with self._get_connection() as db:
        async with db.execute(query, params) as cursor:
            commands = []
            async for row in cursor:
                commands.append(self._row_to_command(row))
            return commands
|
|
483
|
+
|
|
484
|
+
async def get_popular_commands(self, limit: int = 10) -> List[Command]:
    """Return the most-run active commands, highest execution_count first.

    Commands that have never been executed are excluded; ties are broken
    by most recently updated.
    """
    async with self._get_connection() as db:
        async with db.execute(
            """
            SELECT * FROM commands
            WHERE is_active = 1 AND execution_count > 0
            ORDER BY execution_count DESC, updated_at DESC
            LIMIT ?
            """,
            (limit,),
        ) as cursor:
            commands = []
            async for row in cursor:
                commands.append(self._row_to_command(row))
            return commands
|
|
500
|
+
|
|
501
|
+
async def record_execution(self, execution: ExecutionRecord):
    """Record a command execution.

    Inserts the execution row, bumps the owning command's
    execution_count/last_executed, commits both in one transaction, and
    invalidates that command's Redis cache entry so the new counters are
    re-read. Rolls back and re-raises on failure.
    """
    async with self._get_connection() as db:
        try:
            await db.execute(
                """
                INSERT INTO executions
                (id, command_id, executed_at, status, output, error, execution_time_ms, user, context)
                VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
                """,
                (
                    execution.id,
                    execution.command_id,
                    execution.executed_at.isoformat(),
                    execution.status,
                    execution.output,
                    execution.error,
                    execution.execution_time_ms,
                    execution.user,
                    json.dumps(execution.context) if execution.context else None,
                ),
            )

            # Update command execution count
            await db.execute(
                """
                UPDATE commands SET
                    execution_count = execution_count + 1,
                    last_executed = ?
                WHERE id = ?
                """,
                (execution.executed_at.isoformat(), execution.command_id),
            )

            await db.commit()

            # Invalidate cache for the command (it now holds stale counters).
            if self.enable_caching and self.redis_client:
                await self.redis_client.delete(f"command:{execution.command_id}")

        except Exception as e:
            logger.error(f"Error recording execution: {e}")
            await db.rollback()
            raise
|
|
545
|
+
|
|
546
|
+
async def get_execution_history(
    self, command_id: Optional[str] = None, limit: int = 100
) -> List[ExecutionRecord]:
    """Return recent executions (optionally for one command), newest first."""
    sql = "SELECT * FROM executions"
    args = []

    if command_id:
        sql += " WHERE command_id = ?"
        args.append(command_id)

    sql += " ORDER BY executed_at DESC LIMIT ?"
    args.append(limit)

    records: List[ExecutionRecord] = []
    async with self._get_connection() as db:
        async with db.execute(sql, args) as cursor:
            async for row in cursor:
                # Column order mirrors the executions table definition.
                (rec_id, cmd_id, executed_at, status, output,
                 error, elapsed_ms, user, context) = row
                records.append(
                    ExecutionRecord(
                        id=rec_id,
                        command_id=cmd_id,
                        executed_at=datetime.fromisoformat(executed_at),
                        status=status,
                        output=output,
                        error=error,
                        execution_time_ms=elapsed_ms,
                        user=user,
                        context=json.loads(context) if context else None,
                    )
                )
    return records
|
|
578
|
+
|
|
579
|
+
async def _cache_command(self, command: Command):
    """Cache a command's fields as a Redis hash under ``command:<id>``.

    Redis hash values must be str/int/float/bytes, so ``None`` fields are
    omitted (``_get_cached_command`` already treats missing keys as
    ``None``) and the ``is_active`` bool is stored as 0/1 so it
    round-trips through the reader's ``int()`` parse. Caching is
    best-effort: failures are logged and swallowed.
    """
    if not self.redis_client:
        return

    try:
        command_data = {
            "id": command.id,
            "name": command.name,
            "description": command.description,
            "code": command.code,
            "language": command.language,
            "group": command.group,
            "tags": json.dumps(command.tags),
            "execution_count": command.execution_count,
            # Store as 0/1: redis-py rejects Python bools in hash
            # mappings, and the cache reader parses this with int().
            "is_active": int(command.is_active),
            "version": command.version,
            "author": command.author,
            "dependencies": json.dumps(command.dependencies),
            "created_at": command.created_at.isoformat() if command.created_at else None,
            "updated_at": command.updated_at.isoformat() if command.updated_at else None,
            "last_executed": (
                command.last_executed.isoformat() if command.last_executed else None
            ),
        }
        # redis-py raises DataError for None values in a hash mapping,
        # which previously made caching silently fail for any command
        # with an unset optional field (e.g. author=None). Drop them.
        command_data = {k: v for k, v in command_data.items() if v is not None}

        key = f"command:{command.id}"
        await self.redis_client.hset(key, mapping=command_data)
        await self.redis_client.expire(key, self.cache_ttl)

    except Exception as e:
        logger.warning(f"Failed to cache command {command.id}: {e}")
|
|
610
|
+
|
|
611
|
+
async def _get_cached_command(self, command_id: str) -> Optional[Command]:
    """Rebuild a Command from its Redis hash, or None on miss/error.

    Any parse/connection failure is logged and treated as a cache miss so
    callers fall back to SQLite.
    """
    if not self.redis_client:
        return None

    try:
        data = await self.redis_client.hgetall(f"command:{command_id}")
        if not data:
            # Cache miss (or expired entry).
            return None

        return Command(
            id=data["id"],
            name=data["name"],
            description=data.get("description"),
            code=data["code"],
            language=data["language"],
            group=data.get("group"),
            tags=json.loads(data.get("tags", "[]")),
            execution_count=int(data.get("execution_count", 0)),
            # NOTE(review): assumes is_active was cached as a numeric
            # string ("0"/"1"); a Python-bool repr ("True") would make
            # int() raise and fall through to the except below.
            is_active=bool(int(data.get("is_active", 1))),
            version=data.get("version", "1.0"),
            author=data.get("author"),
            dependencies=json.loads(data.get("dependencies", "[]")),
            created_at=(
                datetime.fromisoformat(data["created_at"]) if data.get("created_at") else None
            ),
            updated_at=(
                datetime.fromisoformat(data["updated_at"]) if data.get("updated_at") else None
            ),
            last_executed=(
                datetime.fromisoformat(data["last_executed"])
                if data.get("last_executed")
                else None
            ),
        )

    except Exception as e:
        logger.warning(f"Failed to get cached command {command_id}: {e}")
        return None
|
|
650
|
+
|
|
651
|
+
def _row_to_command(self, row) -> Command:
    """Build a Command from a ``SELECT * FROM commands`` result row."""

    def parse_ts(value):
        # Timestamps are stored as ISO-8601 strings (or NULL).
        return datetime.fromisoformat(value) if value else None

    def parse_json_list(value):
        # tags/dependencies columns hold JSON arrays (or NULL).
        return json.loads(value) if value else []

    return Command(
        id=row[0],
        name=row[1],
        description=row[2],
        code=row[3],
        language=row[4],
        group=row[5],
        tags=parse_json_list(row[6]),
        created_at=parse_ts(row[7]),
        updated_at=parse_ts(row[8]),
        execution_count=row[9] or 0,
        last_executed=parse_ts(row[10]),
        is_active=bool(row[11]),
        # Trailing columns may be absent from rows created by an older
        # schema; fall back to the dataclass defaults.
        version=row[12] if len(row) > 12 else "1.0",
        author=row[13] if len(row) > 13 else None,
        dependencies=parse_json_list(row[14]) if len(row) > 14 else [],
    )
|
|
670
|
+
|
|
671
|
+
async def close(self):
    """Release every pooled SQLite connection and the Redis client."""
    async with self._pool_lock:
        # Drain the pool so no handle can be closed twice.
        while self._connection_pool:
            await self._connection_pool.pop().close()

    if self.redis_client:
        await self.redis_client.close()

    logger.info("AsyncCommandDatabase closed")
|