mcli-framework 7.12.0-py3-none-any.whl → 7.12.3-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of mcli-framework might be problematic.

Files changed (216)
  1. mcli/app/__init__.py +0 -2
  2. mcli/app/commands_cmd.py +19 -23
  3. mcli/app/completion_helpers.py +5 -5
  4. mcli/app/init_cmd.py +10 -10
  5. mcli/app/lock_cmd.py +82 -27
  6. mcli/app/main.py +4 -50
  7. mcli/app/model/model.py +5 -10
  8. mcli/app/store_cmd.py +8 -8
  9. mcli/app/video/__init__.py +0 -2
  10. mcli/app/video/video.py +1 -14
  11. mcli/chat/chat.py +90 -108
  12. mcli/chat/command_rag.py +0 -4
  13. mcli/chat/enhanced_chat.py +32 -41
  14. mcli/chat/system_controller.py +37 -37
  15. mcli/chat/system_integration.py +4 -5
  16. mcli/cli.py +2 -3
  17. mcli/lib/api/api.py +4 -9
  18. mcli/lib/api/daemon_client.py +19 -20
  19. mcli/lib/api/daemon_client_local.py +1 -3
  20. mcli/lib/api/daemon_decorator.py +6 -6
  21. mcli/lib/api/mcli_decorators.py +4 -8
  22. mcli/lib/auth/__init__.py +0 -1
  23. mcli/lib/auth/auth.py +4 -5
  24. mcli/lib/auth/mcli_manager.py +7 -12
  25. mcli/lib/auth/token_util.py +5 -5
  26. mcli/lib/config/__init__.py +29 -1
  27. mcli/lib/config/config.py +0 -1
  28. mcli/lib/custom_commands.py +1 -1
  29. mcli/lib/discovery/command_discovery.py +15 -15
  30. mcli/lib/erd/erd.py +7 -7
  31. mcli/lib/files/files.py +1 -1
  32. mcli/lib/fs/__init__.py +31 -1
  33. mcli/lib/fs/fs.py +12 -13
  34. mcli/lib/lib.py +0 -1
  35. mcli/lib/logger/logger.py +7 -10
  36. mcli/lib/performance/optimizer.py +25 -27
  37. mcli/lib/performance/rust_bridge.py +22 -27
  38. mcli/lib/performance/uvloop_config.py +0 -1
  39. mcli/lib/pickles/__init__.py +0 -1
  40. mcli/lib/pickles/pickles.py +0 -2
  41. mcli/lib/secrets/commands.py +0 -2
  42. mcli/lib/secrets/manager.py +0 -1
  43. mcli/lib/secrets/repl.py +2 -3
  44. mcli/lib/secrets/store.py +1 -2
  45. mcli/lib/services/data_pipeline.py +34 -34
  46. mcli/lib/services/lsh_client.py +38 -40
  47. mcli/lib/shell/shell.py +2 -2
  48. mcli/lib/toml/__init__.py +0 -1
  49. mcli/lib/ui/styling.py +0 -1
  50. mcli/lib/ui/visual_effects.py +33 -41
  51. mcli/lib/watcher/watcher.py +0 -1
  52. mcli/ml/__init__.py +1 -1
  53. mcli/ml/api/__init__.py +1 -1
  54. mcli/ml/api/app.py +8 -9
  55. mcli/ml/api/middleware.py +10 -10
  56. mcli/ml/api/routers/__init__.py +1 -1
  57. mcli/ml/api/routers/admin_router.py +3 -3
  58. mcli/ml/api/routers/auth_router.py +17 -18
  59. mcli/ml/api/routers/backtest_router.py +2 -2
  60. mcli/ml/api/routers/data_router.py +2 -2
  61. mcli/ml/api/routers/model_router.py +14 -15
  62. mcli/ml/api/routers/monitoring_router.py +2 -2
  63. mcli/ml/api/routers/portfolio_router.py +2 -2
  64. mcli/ml/api/routers/prediction_router.py +10 -9
  65. mcli/ml/api/routers/trade_router.py +2 -2
  66. mcli/ml/api/routers/websocket_router.py +6 -7
  67. mcli/ml/api/schemas.py +2 -2
  68. mcli/ml/auth/__init__.py +1 -1
  69. mcli/ml/auth/auth_manager.py +22 -23
  70. mcli/ml/auth/models.py +17 -17
  71. mcli/ml/auth/permissions.py +17 -17
  72. mcli/ml/backtesting/__init__.py +1 -1
  73. mcli/ml/backtesting/backtest_engine.py +31 -35
  74. mcli/ml/backtesting/performance_metrics.py +12 -14
  75. mcli/ml/backtesting/run.py +1 -2
  76. mcli/ml/cache.py +35 -36
  77. mcli/ml/cli/__init__.py +1 -1
  78. mcli/ml/cli/main.py +21 -24
  79. mcli/ml/config/__init__.py +1 -1
  80. mcli/ml/config/settings.py +28 -29
  81. mcli/ml/configs/__init__.py +1 -1
  82. mcli/ml/configs/dvc_config.py +14 -15
  83. mcli/ml/configs/mlflow_config.py +12 -13
  84. mcli/ml/configs/mlops_manager.py +19 -21
  85. mcli/ml/dashboard/__init__.py +4 -4
  86. mcli/ml/dashboard/app.py +20 -30
  87. mcli/ml/dashboard/app_supabase.py +16 -19
  88. mcli/ml/dashboard/app_training.py +11 -14
  89. mcli/ml/dashboard/cli.py +2 -2
  90. mcli/ml/dashboard/common.py +2 -3
  91. mcli/ml/dashboard/components/__init__.py +1 -1
  92. mcli/ml/dashboard/components/charts.py +13 -11
  93. mcli/ml/dashboard/components/metrics.py +7 -7
  94. mcli/ml/dashboard/components/tables.py +12 -9
  95. mcli/ml/dashboard/overview.py +2 -2
  96. mcli/ml/dashboard/pages/__init__.py +1 -1
  97. mcli/ml/dashboard/pages/cicd.py +15 -18
  98. mcli/ml/dashboard/pages/debug_dependencies.py +7 -7
  99. mcli/ml/dashboard/pages/monte_carlo_predictions.py +11 -18
  100. mcli/ml/dashboard/pages/predictions_enhanced.py +24 -32
  101. mcli/ml/dashboard/pages/scrapers_and_logs.py +22 -24
  102. mcli/ml/dashboard/pages/test_portfolio.py +3 -6
  103. mcli/ml/dashboard/pages/trading.py +16 -18
  104. mcli/ml/dashboard/pages/workflows.py +20 -30
  105. mcli/ml/dashboard/utils.py +9 -9
  106. mcli/ml/dashboard/warning_suppression.py +3 -3
  107. mcli/ml/data_ingestion/__init__.py +1 -1
  108. mcli/ml/data_ingestion/api_connectors.py +41 -46
  109. mcli/ml/data_ingestion/data_pipeline.py +36 -46
  110. mcli/ml/data_ingestion/stream_processor.py +43 -46
  111. mcli/ml/database/__init__.py +1 -1
  112. mcli/ml/database/migrations/env.py +2 -2
  113. mcli/ml/database/models.py +22 -24
  114. mcli/ml/database/session.py +14 -14
  115. mcli/ml/experimentation/__init__.py +1 -1
  116. mcli/ml/experimentation/ab_testing.py +45 -46
  117. mcli/ml/features/__init__.py +1 -1
  118. mcli/ml/features/ensemble_features.py +22 -27
  119. mcli/ml/features/recommendation_engine.py +30 -30
  120. mcli/ml/features/stock_features.py +29 -32
  121. mcli/ml/features/test_feature_engineering.py +10 -11
  122. mcli/ml/logging.py +4 -4
  123. mcli/ml/mlops/__init__.py +1 -1
  124. mcli/ml/mlops/data_versioning.py +29 -30
  125. mcli/ml/mlops/experiment_tracker.py +24 -24
  126. mcli/ml/mlops/model_serving.py +31 -34
  127. mcli/ml/mlops/pipeline_orchestrator.py +27 -35
  128. mcli/ml/models/__init__.py +5 -6
  129. mcli/ml/models/base_models.py +23 -23
  130. mcli/ml/models/ensemble_models.py +31 -31
  131. mcli/ml/models/recommendation_models.py +18 -19
  132. mcli/ml/models/test_models.py +14 -16
  133. mcli/ml/monitoring/__init__.py +1 -1
  134. mcli/ml/monitoring/drift_detection.py +32 -36
  135. mcli/ml/monitoring/metrics.py +2 -2
  136. mcli/ml/optimization/__init__.py +1 -1
  137. mcli/ml/optimization/optimize.py +1 -2
  138. mcli/ml/optimization/portfolio_optimizer.py +30 -32
  139. mcli/ml/predictions/__init__.py +1 -1
  140. mcli/ml/preprocessing/__init__.py +1 -1
  141. mcli/ml/preprocessing/data_cleaners.py +22 -23
  142. mcli/ml/preprocessing/feature_extractors.py +23 -26
  143. mcli/ml/preprocessing/ml_pipeline.py +23 -23
  144. mcli/ml/preprocessing/test_preprocessing.py +7 -8
  145. mcli/ml/scripts/populate_sample_data.py +0 -4
  146. mcli/ml/serving/serve.py +1 -2
  147. mcli/ml/tasks.py +17 -17
  148. mcli/ml/tests/test_integration.py +29 -30
  149. mcli/ml/tests/test_training_dashboard.py +21 -21
  150. mcli/ml/trading/__init__.py +1 -1
  151. mcli/ml/trading/migrations.py +5 -5
  152. mcli/ml/trading/models.py +21 -23
  153. mcli/ml/trading/paper_trading.py +16 -13
  154. mcli/ml/trading/risk_management.py +17 -18
  155. mcli/ml/trading/trading_service.py +25 -28
  156. mcli/ml/training/__init__.py +1 -1
  157. mcli/ml/training/train.py +0 -1
  158. mcli/public/oi/oi.py +1 -2
  159. mcli/self/completion_cmd.py +6 -10
  160. mcli/self/logs_cmd.py +19 -24
  161. mcli/self/migrate_cmd.py +22 -20
  162. mcli/self/redis_cmd.py +10 -11
  163. mcli/self/self_cmd.py +62 -18
  164. mcli/self/store_cmd.py +10 -12
  165. mcli/self/visual_cmd.py +9 -14
  166. mcli/self/zsh_cmd.py +2 -4
  167. mcli/workflow/daemon/async_command_database.py +23 -24
  168. mcli/workflow/daemon/async_process_manager.py +27 -29
  169. mcli/workflow/daemon/client.py +27 -33
  170. mcli/workflow/daemon/daemon.py +32 -36
  171. mcli/workflow/daemon/enhanced_daemon.py +24 -33
  172. mcli/workflow/daemon/process_cli.py +11 -12
  173. mcli/workflow/daemon/process_manager.py +23 -26
  174. mcli/workflow/daemon/test_daemon.py +4 -5
  175. mcli/workflow/dashboard/dashboard_cmd.py +0 -1
  176. mcli/workflow/doc_convert.py +15 -17
  177. mcli/workflow/gcloud/__init__.py +0 -1
  178. mcli/workflow/gcloud/gcloud.py +11 -8
  179. mcli/workflow/git_commit/ai_service.py +14 -15
  180. mcli/workflow/lsh_integration.py +9 -11
  181. mcli/workflow/model_service/client.py +26 -31
  182. mcli/workflow/model_service/download_and_run_efficient_models.py +10 -14
  183. mcli/workflow/model_service/lightweight_embedder.py +25 -35
  184. mcli/workflow/model_service/lightweight_model_server.py +26 -32
  185. mcli/workflow/model_service/lightweight_test.py +7 -10
  186. mcli/workflow/model_service/model_service.py +80 -91
  187. mcli/workflow/model_service/ollama_efficient_runner.py +14 -18
  188. mcli/workflow/model_service/openai_adapter.py +23 -23
  189. mcli/workflow/model_service/pdf_processor.py +21 -26
  190. mcli/workflow/model_service/test_efficient_runner.py +12 -16
  191. mcli/workflow/model_service/test_example.py +11 -13
  192. mcli/workflow/model_service/test_integration.py +3 -5
  193. mcli/workflow/model_service/test_new_features.py +7 -8
  194. mcli/workflow/notebook/converter.py +1 -1
  195. mcli/workflow/notebook/notebook_cmd.py +5 -6
  196. mcli/workflow/notebook/schema.py +0 -1
  197. mcli/workflow/notebook/validator.py +7 -3
  198. mcli/workflow/openai/openai.py +1 -2
  199. mcli/workflow/registry/registry.py +4 -1
  200. mcli/workflow/repo/repo.py +6 -7
  201. mcli/workflow/scheduler/cron_parser.py +16 -19
  202. mcli/workflow/scheduler/job.py +10 -10
  203. mcli/workflow/scheduler/monitor.py +15 -15
  204. mcli/workflow/scheduler/persistence.py +17 -18
  205. mcli/workflow/scheduler/scheduler.py +37 -38
  206. mcli/workflow/secrets/__init__.py +1 -1
  207. mcli/workflow/sync/test_cmd.py +0 -1
  208. mcli/workflow/wakatime/__init__.py +5 -9
  209. mcli/workflow/wakatime/wakatime.py +1 -2
  210. {mcli_framework-7.12.0.dist-info → mcli_framework-7.12.3.dist-info}/METADATA +1 -1
  211. mcli_framework-7.12.3.dist-info/RECORD +279 -0
  212. mcli_framework-7.12.0.dist-info/RECORD +0 -279
  213. {mcli_framework-7.12.0.dist-info → mcli_framework-7.12.3.dist-info}/WHEEL +0 -0
  214. {mcli_framework-7.12.0.dist-info → mcli_framework-7.12.3.dist-info}/entry_points.txt +0 -0
  215. {mcli_framework-7.12.0.dist-info → mcli_framework-7.12.3.dist-info}/licenses/LICENSE +0 -0
  216. {mcli_framework-7.12.0.dist-info → mcli_framework-7.12.3.dist-info}/top_level.txt +0 -0
mcli/workflow/model_service/client.py
@@ -1,9 +1,5 @@
  import json
- import os
- import time
- from datetime import datetime
- from pathlib import Path
- from typing import Any, Dict, List, Optional, Union
+ from typing import Any, Dict, List, Optional

  import click
  import requests
@@ -14,7 +10,7 @@ logger = get_logger(__name__)


  class ModelServiceClient:
- """Client for interacting with the model service daemon"""
+ """Client for interacting with the model service daemon."""

  def __init__(self, base_url: str = "http://localhost:8000"):
  self.base_url = base_url.rstrip("/")
@@ -24,7 +20,7 @@ class ModelServiceClient:
  )

  def _make_request(self, method: str, endpoint: str, data: Optional[Dict] = None) -> Dict:
- """Make a request to the model service"""
+ """Make a request to the model service."""
  url = f"{self.base_url}{endpoint}"

  try:
@@ -54,15 +50,15 @@ class ModelServiceClient:
  raise RuntimeError(f"Request failed: {e}")

  def get_status(self) -> Dict[str, Any]:
- """Get service status"""
+ """Get service status."""
  return self._make_request("GET", "/")

  def get_health(self) -> Dict[str, Any]:
- """Get service health"""
+ """Get service health."""
  return self._make_request("GET", "/health")

  def list_models(self) -> List[Dict[str, Any]]:
- """List all available models"""
+ """List all available models."""
  return self._make_request("GET", "/models")

  def load_model(
@@ -77,7 +73,7 @@
  top_p: float = 0.9,
  top_k: int = 50,
  ) -> str:
- """Load a new model"""
+ """Load a new model."""
  data = {
  "name": name,
  "model_type": model_type,
@@ -94,7 +90,7 @@
  return result["model_id"]

  def unload_model(self, model_id: str) -> bool:
- """Unload a model"""
+ """Unload a model."""
  try:
  self._make_request("DELETE", f"/models/{model_id}")
  return True
@@ -102,7 +98,7 @@
  return False

  def update_model(self, model_id: str, updates: Dict[str, Any]) -> bool:
- """Update model configuration"""
+ """Update model configuration."""
  try:
  self._make_request("PUT", f"/models/{model_id}", updates)
  return True
@@ -110,7 +106,7 @@
  return False

  def remove_model(self, model_id: str) -> bool:
- """Remove a model from the database"""
+ """Remove a model from the database."""
  try:
  self._make_request("DELETE", f"/models/{model_id}/remove")
  return True
@@ -126,7 +122,7 @@
  top_p: Optional[float] = None,
  top_k: Optional[int] = None,
  ) -> Dict[str, Any]:
- """Generate text using a model"""
+ """Generate text using a model."""
  data = {
  "prompt": prompt,
  "max_length": max_length,
@@ -141,14 +137,14 @@
  return self._make_request("POST", f"/models/{model_id}/generate", data)

  def classify_text(self, model_id: str, text: str) -> Dict[str, Any]:
- """Classify text using a model"""
+ """Classify text using a model."""
  data = {"text": text}
  return self._make_request("POST", f"/models/{model_id}/classify", data)

  def translate_text(
  self, model_id: str, text: str, source_lang: str = "en", target_lang: str = "fr"
  ) -> Dict[str, Any]:
- """Translate text using a model"""
+ """Translate text using a model."""
  data = {"text": text, "source_lang": source_lang, "target_lang": target_lang}
  return self._make_request("POST", f"/models/{model_id}/translate", data)

@@ -156,14 +152,13 @@
  # CLI Commands
  @click.group(name="model-client")
  def model_client():
- """Client for interacting with the model service daemon"""
- pass
+ """Client for interacting with the model service daemon."""


  @model_client.command()
  @click.option("--url", default="http://localhost:8000", help="Model service URL")
  def status(url: str):
- """Get model service status"""
+ """Get model service status."""
  try:
  client = ModelServiceClient(url)
  status_info = client.get_status()
@@ -187,7 +182,7 @@ def status(url: str):
  @model_client.command()
  @click.option("--url", default="http://localhost:8000", help="Model service URL")
  def list_models(url: str):
- """List all available models"""
+ """List all available models."""
  try:
  client = ModelServiceClient(url)
  models = client.list_models()
@@ -240,7 +235,7 @@ def load_model(
  top_p: float = 0.9,
  top_k: int = 50,
  ):
- """Load a model into the service"""
+ """Load a model into the service."""
  try:
  client = ModelServiceClient(url)

@@ -268,7 +263,7 @@
  @click.argument("model_id")
  @click.option("--url", default="http://localhost:8000", help="Model service URL")
  def unload_model(model_id: str, url: str):
- """Unload a model from the service"""
+ """Unload a model from the service."""
  try:
  client = ModelServiceClient(url)

@@ -303,7 +298,7 @@ def update_model(
  top_k: Optional[int] = None,
  device: Optional[str] = None,
  ):
- """Update model configuration"""
+ """Update model configuration."""
  try:
  client = ModelServiceClient(url)

@@ -350,7 +345,7 @@ def update_model(
  @click.option("--url", default="http://localhost:8000", help="Model service URL")
  @click.option("--force", is_flag=True, help="Force removal without confirmation")
  def remove_model(model_id: str, url: str, force: bool = False):
- """Remove a model from the database"""
+ """Remove a model from the database."""
  try:
  client = ModelServiceClient(url)

@@ -364,7 +359,7 @@ def remove_model(model_id: str, url: str, force: bool = False):
  break

  if model_info:
- click.echo(f"Model to remove:")
+ click.echo("Model to remove:")
  click.echo(f" Name: {model_info['name']}")
  click.echo(f" Type: {model_info['model_type']}")
  click.echo(f" Path: {model_info['model_path']}")
@@ -406,7 +401,7 @@ def generate(
  top_p: Optional[float] = None,
  top_k: Optional[int] = None,
  ):
- """Generate text using a model"""
+ """Generate text using a model."""
  try:
  client = ModelServiceClient(url)

@@ -437,7 +432,7 @@
  @click.argument("text")
  @click.option("--url", default="http://localhost:8000", help="Model service URL")
  def classify(model_id: str, text: str, url: str):
- """Classify text using a model"""
+ """Classify text using a model."""
  try:
  client = ModelServiceClient(url)

@@ -465,7 +460,7 @@ def classify(model_id: str, text: str, url: str):
  @click.option("--source-lang", default="en", help="Source language")
  @click.option("--target-lang", default="fr", help="Target language")
  def translate(model_id: str, text: str, url: str, source_lang: str = "en", target_lang: str = "fr"):
- """Translate text using a model"""
+ """Translate text using a model."""
  try:
  client = ModelServiceClient(url)

@@ -491,7 +486,7 @@
  @click.option("--model-id", required=True, help="Model ID to test")
  @click.option("--prompt", default="Hello, how are you?", help="Test prompt")
  def test_model(url: str, model_id: str, prompt: str):
- """Test a model with a simple prompt"""
+ """Test a model with a simple prompt."""
  try:
  client = ModelServiceClient(url)

@@ -524,7 +519,7 @@ def batch_test(
  model_id: Optional[str] = None,
  output: Optional[str] = None,
  ):
- """Run batch tests on a model"""
+ """Run batch tests on a model."""
  try:
  client = ModelServiceClient(url)

mcli/workflow/model_service/download_and_run_efficient_models.py
@@ -6,21 +6,17 @@ This script identifies the most efficient models in terms of compute and accuracy,
  downloads them, and runs them using the MCLI model service.
  """

- import json
- import os
- import subprocess
  import sys
  import time
  from pathlib import Path
- from typing import Dict, List, Optional
+ from typing import Dict, Optional

  import click
- import requests

  # Add the parent directory to the path so we can import the model service
  sys.path.insert(0, str(Path(__file__).parent.parent.parent.parent))

- from mcli.workflow.model_service.model_service import ModelManager, ModelService
+ from mcli.workflow.model_service.model_service import ModelService

  # Efficient models from Ollama search results
  EFFICIENT_MODELS = {
@@ -68,7 +64,7 @@ EFFICIENT_MODELS = {


  def get_system_info():
- """Get system information for model selection"""
+ """Get system information for model selection."""
  import psutil

  # Get CPU info
@@ -103,7 +99,7 @@


  def recommend_model(system_info: Dict) -> str:
- """Recommend the best model based on system capabilities"""
+ """Recommend the best model based on system capabilities."""
  print("🔍 Analyzing system capabilities...")
  print(f" CPU Cores: {system_info['cpu_count']}")
  print(f" CPU Frequency: {system_info['cpu_freq_mhz']:.0f} MHz")
@@ -127,7 +123,7 @@


  def download_and_setup_model(model_key: str, service: ModelService) -> Optional[str]:
- """Download and setup a model using the MCLI service"""
+ """Download and setup a model using the MCLI service."""
  model_info = EFFICIENT_MODELS[model_key]

  print(f"\n🚀 Setting up {model_info['name']}...")
@@ -159,7 +155,7 @@


  def test_model(service: ModelService, model_id: str, model_name: str):
- """Test the model with sample prompts"""
+ """Test the model with sample prompts."""
  print(f"\n🧪 Testing {model_name}...")

  test_prompts = [
@@ -190,7 +186,7 @@


  def start_model_service():
- """Start the MCLI model service"""
+ """Start the MCLI model service."""
  print("🔧 Starting MCLI model service...")

  try:
@@ -233,7 +229,7 @@
  "--service-only", is_flag=True, help="Only start the model service without downloading models"
  )
  def main(model: Optional[str], auto: bool, test: bool, service_only: bool):
- """Download and run efficient models from Ollama using MCLI"""
+ """Download and run efficient models from Ollama using MCLI."""

  print("🚀 MCLI Efficient Model Runner")
  print("=" * 50)
@@ -278,8 +274,8 @@ def main(model: Optional[str], auto: bool, test: bool, service_only: bool):

  print(f"\n🎉 Setup complete! Model {EFFICIENT_MODELS[selected_model]['name']} is ready to use.")
  print(f"📊 Model ID: {model_id}")
- print(f"🌐 API available at: http://localhost:8000")
- print(f"📝 Use 'mcli model-service list-models' to see all models")
+ print("🌐 API available at: http://localhost:8000")
+ print("📝 Use 'mcli model-service list-models' to see all models")

  return 0

mcli/workflow/model_service/lightweight_embedder.py
@@ -6,11 +6,8 @@ This module provides lightweight text embedding capabilities
  that don't require heavy ML libraries like PyTorch or transformers.
  """

- import hashlib
  import json
  import logging
- import os
- import sys
  from datetime import datetime
  from pathlib import Path
  from typing import Any, Dict, List, Optional
@@ -18,27 +15,21 @@ from typing import Any, Dict, List, Optional
  import numpy as np

  # Try to import lightweight alternatives
- try:
- import sentence_transformers
-
- HAS_SENTENCE_TRANSFORMERS = True
- except ImportError:
- HAS_SENTENCE_TRANSFORMERS = False
+ HAS_SENTENCE_TRANSFORMERS = False # Placeholder for future implementation

  try:
- import sklearn
  from sklearn.feature_extraction.text import TfidfVectorizer
- from sklearn.metrics.pairwise import cosine_similarity

  HAS_SKLEARN = True
  except ImportError:
  HAS_SKLEARN = False
+ TfidfVectorizer = None # type: ignore

  logger = logging.getLogger(__name__)


  class LightweightEmbedder:
- """Lightweight text embedder with multiple fallback methods"""
+ """Lightweight text embedder with multiple fallback methods."""

  def __init__(self, models_dir: str = "./models/embeddings"):
  self.models_dir = Path(models_dir)
@@ -47,23 +38,23 @@ class LightweightEmbedder:
  self.embedding_cache = {}

  def get_embedding_method(self) -> str:
- """Determine the best available embedding method"""
+ """Determine the best available embedding method."""
  if HAS_SENTENCE_TRANSFORMERS:
  return "sentence_transformers"
  elif HAS_SKLEARN:
- return "tfidf"
+ return "tfid"
  else:
  return "simple_hash"

  def embed_text(self, text: str, method: Optional[str] = None) -> Dict[str, Any]:
- """Embed text using the specified or best available method"""
+ """Embed text using the specified or best available method."""
  if not method:
  method = self.get_embedding_method()

  try:
  if method == "sentence_transformers":
  return self._embed_with_sentence_transformers(text)
- elif method == "tfidf":
+ elif method == "tfid":
  return self._embed_with_tfidf(text)
  else:
  return self._embed_with_simple_hash(text)
@@ -73,7 +64,7 @@
  return self._embed_with_simple_hash(text)

  def _embed_with_sentence_transformers(self, text: str) -> Dict[str, Any]:
- """Embed text using sentence-transformers"""
+ """Embed text using sentence-transformers."""
  try:
  from sentence_transformers import SentenceTransformer

@@ -97,9 +88,8 @@
  raise

  def _embed_with_tfidf(self, text: str) -> Dict[str, Any]:
- """Embed text using TF-IDF"""
+ """Embed text using TF-IDF."""
  try:
- from sklearn.feature_extraction.text import TfidfVectorizer

  # Create or reuse vectorizer
  if self.vectorizer is None:
@@ -114,8 +104,8 @@
  embedding = tfidf_vector.toarray()[0]

  return {
- "method": "tfidf",
- "model": "sklearn_tfidf",
+ "method": "tfid",
+ "model": "sklearn_tfid",
  "embedding": embedding.tolist(),
  "dimensions": len(embedding),
  "text_length": len(text),
@@ -126,7 +116,7 @@
  raise

  def _embed_with_simple_hash(self, text: str) -> Dict[str, Any]:
- """Embed text using simple hash-based method"""
+ """Embed text using simple hash-based method."""
  try:
  # Create a simple hash-based embedding
  words = text.lower().split()
@@ -165,7 +155,7 @@
  raise

  def chunk_text(self, text: str, chunk_size: int = 1000, overlap: int = 200) -> List[str]:
- """Split text into overlapping chunks"""
+ """Split text into overlapping chunks."""
  chunks = []
  start = 0

@@ -181,7 +171,7 @@
  return chunks

  def embed_document(self, text: str, chunk_size: int = 1000) -> Dict[str, Any]:
- """Embed a document by chunking and embedding each chunk"""
+ """Embed a document by chunking and embedding each chunk."""
  try:
  # Split text into chunks
  chunks = self.chunk_text(text, chunk_size)
@@ -222,7 +212,7 @@
  return {"success": False, "error": str(e)}

  def search_similar(self, query: str, embeddings: List[Dict], top_k: int = 5) -> List[Dict]:
- """Search for similar documents using embeddings"""
+ """Search for similar documents using embeddings."""
  try:
  # Embed the query
  query_embedding = self.embed_text(query)
@@ -263,11 +253,11 @@
  return []

  def get_status(self) -> Dict[str, Any]:
- """Get the status of the embedder"""
+ """Get the status of the embedder."""
  return {
  "available_methods": {
  "sentence_transformers": HAS_SENTENCE_TRANSFORMERS,
- "tfidf": HAS_SKLEARN,
+ "tfid": HAS_SKLEARN,
  "simple_hash": True, # Always available
  },
  "current_method": self.get_embedding_method(),
@@ -277,9 +267,9 @@


  def create_embedder_api():
- """Create a simple API for the embedder"""
+ """Create a simple API for the embedder."""
  import urllib.parse
- from http.server import BaseHTTPRequestHandler, HTTPServer
+ from http.server import BaseHTTPRequestHandler

  class EmbedderHandler(BaseHTTPRequestHandler):
  def __init__(self, *args, embedder=None, **kwargs):
@@ -287,7 +277,7 @@ def create_embedder_api():
  super().__init__(*args, **kwargs)

  def do_POST(self):
- """Handle embedding requests"""
+ """Handle embedding requests."""
  parsed_path = urllib.parse.urlparse(self.path)
  path = parsed_path.path

@@ -301,7 +291,7 @@ def create_embedder_api():
  self._send_response(404, {"error": "Endpoint not found"})

  def do_GET(self):
- """Handle status requests"""
+ """Handle status requests."""
  parsed_path = urllib.parse.urlparse(self.path)
  path = parsed_path.path

@@ -312,7 +302,7 @@ def create_embedder_api():
  self._send_response(404, {"error": "Endpoint not found"})

  def _handle_embed_text(self):
- """Handle text embedding requests"""
+ """Handle text embedding requests."""
  try:
  content_length = int(self.headers.get("Content-Length", 0))
  post_data = self.rfile.read(content_length)
@@ -332,7 +322,7 @@ def create_embedder_api():
  self._send_response(500, {"error": str(e)})

  def _handle_embed_document(self):
- """Handle document embedding requests"""
+ """Handle document embedding requests."""
  try:
  content_length = int(self.headers.get("Content-Length", 0))
  post_data = self.rfile.read(content_length)
@@ -352,7 +342,7 @@ def create_embedder_api():
  self._send_response(500, {"error": str(e)})

  def _handle_search(self):
- """Handle search requests"""
+ """Handle search requests."""
  try:
  content_length = int(self.headers.get("Content-Length", 0))
  post_data = self.rfile.read(content_length)
@@ -373,7 +363,7 @@ def create_embedder_api():
  self._send_response(500, {"error": str(e)})

  def _send_response(self, status_code, data):
- """Send JSON response"""
+ """Send JSON response."""
  self.send_response(status_code)
  self.send_header("Content-Type", "application/json")
  self.send_header("Access-Control-Allow-Origin", "*")