deploy-llm 0.0.3__tar.gz
Sign up to get free protection for your applications and to get access to all the features.
- deploy_llm-0.0.3/PKG-INFO +12 -0
- deploy_llm-0.0.3/deploy_llm.egg-info/PKG-INFO +12 -0
- deploy_llm-0.0.3/deploy_llm.egg-info/SOURCES.txt +10 -0
- deploy_llm-0.0.3/deploy_llm.egg-info/dependency_links.txt +1 -0
- deploy_llm-0.0.3/deploy_llm.egg-info/entry_points.txt +2 -0
- deploy_llm-0.0.3/deploy_llm.egg-info/requires.txt +3 -0
- deploy_llm-0.0.3/deploy_llm.egg-info/top_level.txt +1 -0
- deploy_llm-0.0.3/llmdeploy/__init__.py +0 -0
- deploy_llm-0.0.3/llmdeploy/cli.py +46 -0
- deploy_llm-0.0.3/llmdeploy/model_manager.py +102 -0
- deploy_llm-0.0.3/setup.cfg +4 -0
- deploy_llm-0.0.3/setup.py +25 -0
@@ -0,0 +1,12 @@
|
|
1
|
+
Metadata-Version: 2.1
|
2
|
+
Name: deploy-llm
|
3
|
+
Version: 0.0.3
|
4
|
+
Summary: CLI tool for deploying and running models from Ollama and Hugging Face
|
5
|
+
Author: Ankit Gupta
|
6
|
+
Classifier: Programming Language :: Python :: 3
|
7
|
+
Classifier: License :: OSI Approved :: MIT License
|
8
|
+
Classifier: Operating System :: OS Independent
|
9
|
+
Requires-Python: >=3.7
|
10
|
+
Requires-Dist: click
|
11
|
+
Requires-Dist: torch
|
12
|
+
Requires-Dist: transformers
|
@@ -0,0 +1,12 @@
|
|
1
|
+
Metadata-Version: 2.1
|
2
|
+
Name: deploy-llm
|
3
|
+
Version: 0.0.3
|
4
|
+
Summary: CLI tool for deploying and running models from Ollama and Hugging Face
|
5
|
+
Author: Ankit Gupta
|
6
|
+
Classifier: Programming Language :: Python :: 3
|
7
|
+
Classifier: License :: OSI Approved :: MIT License
|
8
|
+
Classifier: Operating System :: OS Independent
|
9
|
+
Requires-Python: >=3.7
|
10
|
+
Requires-Dist: click
|
11
|
+
Requires-Dist: torch
|
12
|
+
Requires-Dist: transformers
|
@@ -0,0 +1,10 @@
|
|
1
|
+
setup.py
|
2
|
+
deploy_llm.egg-info/PKG-INFO
|
3
|
+
deploy_llm.egg-info/SOURCES.txt
|
4
|
+
deploy_llm.egg-info/dependency_links.txt
|
5
|
+
deploy_llm.egg-info/entry_points.txt
|
6
|
+
deploy_llm.egg-info/requires.txt
|
7
|
+
deploy_llm.egg-info/top_level.txt
|
8
|
+
llmdeploy/__init__.py
|
9
|
+
llmdeploy/cli.py
|
10
|
+
llmdeploy/model_manager.py
|
@@ -0,0 +1 @@
|
|
1
|
+
|
@@ -0,0 +1 @@
|
|
1
|
+
llmdeploy
|
File without changes
|
@@ -0,0 +1,46 @@
|
|
1
|
+
import click

from llmdeploy.model_manager import (
    deploy_model,
    list_deployed_models,
    list_models,
    remove_model,
    run_inference,
)


# Commands are attached directly via @cli.command() instead of a separate
# cli.add_command() wiring section; the resulting CLI is identical.
@click.group()
def cli():
    """CLI tool to deploy and manage models from Ollama and Hugging Face."""


@cli.command()
@click.option('--model', required=True, help="Model name to deploy")
@click.option('--source', default="ollama", type=click.Choice(["ollama", "huggingface"]), help="Source of the model (default: ollama)")
def deploy(model, source):
    """Deploy a model from Ollama or Hugging Face."""
    result = deploy_model(model, source)
    click.echo(result)


@cli.command(name="list")
def list_cmd():
    """List all locally available models."""
    result = list_models()
    click.echo(result)


@cli.command(name="list-deployed")
def list_deployed_cmd():
    """List deployed models tracked by this tool."""
    result = list_deployed_models()
    click.echo(result)


@cli.command()
@click.option('--model', required=True, help="Model name to remove")
def remove(model):
    """Remove a deployed model."""
    result = remove_model(model)
    click.echo(result)


@cli.command()
@click.option('--model', required=True, help="Model name to query")
@click.option('--prompt', required=True, help="Prompt to send to the model")
def query(model, prompt):
    """Run inference on a model."""
    result = run_inference(model, prompt)
    click.echo(result)


if __name__ == "__main__":
    cli()
|
@@ -0,0 +1,102 @@
|
|
1
|
+
import os
|
2
|
+
import json
|
3
|
+
import subprocess
|
4
|
+
import sys
|
5
|
+
|
6
|
+
# Tracking file that records which models were deployed through this tool.
# NOTE(review): the path is relative, so reads/writes depend on the current
# working directory of whoever invokes the CLI — confirm this is intended.
DEPLOYED_MODELS_FILE = "deployed_models.json"
|
8
|
+
|
9
|
+
def load_deployed_models():
    """Return the deployed-model registry from disk, or {} if unavailable.

    A missing or corrupt tracking file is treated the same as "nothing
    deployed yet" rather than being raised to the caller.
    """
    try:
        with open(DEPLOYED_MODELS_FILE, "r") as fh:
            registry = json.load(fh)
    except (FileNotFoundError, json.JSONDecodeError):
        registry = {}
    return registry
|
16
|
+
|
17
|
+
def save_deployed_models(models):
    """Persist the deployed-model registry as pretty-printed JSON."""
    serialized = json.dumps(models, indent=4)
    with open(DEPLOYED_MODELS_FILE, "w") as fh:
        fh.write(serialized)
|
21
|
+
|
22
|
+
|
23
|
+
def deploy_model(real_model_name, source="ollama"):
    """Deploy a model by pulling it with ``ollama pull`` and record it.

    BUG FIX: cli.py invokes ``deploy_model(model, source)`` with two
    arguments, but this function previously accepted only one — every
    ``llmdeploy deploy`` call raised TypeError. A ``source`` parameter with
    a default keeps the old one-argument calls working. The function also
    now returns a status string (instead of only printing and returning
    None), because cli.py does ``click.echo(deploy_model(...))`` and would
    otherwise echo "None"; this matches the other functions in this module.

    Parameters
    ----------
    real_model_name : str
        Name of the model to pull, as known to Ollama.
    source : str, optional
        Source registry selected by the CLI ("ollama" or "huggingface").

    Returns
    -------
    str
        Human-readable success or error message for the CLI to echo.
    """
    if source != "ollama":
        # NOTE(review): the CLI advertises a "huggingface" source, but no
        # Hugging Face deployment path exists in this module — report it
        # clearly instead of pulling the wrong thing from Ollama.
        return f"ā Source '{source}' is not supported yet; only 'ollama' is implemented."

    print(f"š Deploying model '{real_model_name}'...\n")
    try:
        # check=True raises CalledProcessError on a failed pull;
        # FileNotFoundError means the ollama binary is not installed.
        subprocess.run(["ollama", "pull", real_model_name], check=True)
    except (subprocess.CalledProcessError, FileNotFoundError) as e:
        return f"ā Error deploying model: {e}"

    deployed_models = load_deployed_models()
    deployed_models[real_model_name] = real_model_name  # Store real model name
    save_deployed_models(deployed_models)

    return f"\nā Model '{real_model_name}' deployed successfully!"
|
33
|
+
|
34
|
+
|
35
|
+
def list_models():
    """Return the output of ``ollama list``, or an error message string."""
    try:
        proc = subprocess.run(
            ["ollama", "list"], capture_output=True, text=True
        )
    except Exception as exc:
        # e.g. the ollama binary is not installed at all
        return f"ā Error fetching models: {exc}"

    if proc.returncode != 0:
        return f"ā Error fetching models: {proc.stderr}"

    output = proc.stdout.strip()
    return output if output else "š No models found."
|
48
|
+
|
49
|
+
def list_deployed_models():
    """Return a newline-separated listing of models tracked by this tool."""
    tracked = load_deployed_models()
    if not tracked:
        return "š No deployed models found."
    entries = [f"{real} -> {installed}" for real, installed in tracked.items()]
    return "\n".join(entries)
|
55
|
+
|
56
|
+
def remove_model(real_model_name):
    """Remove a deployed Ollama model from both the machine and tracking list.

    Streams ``ollama rm`` output to stdout as it arrives, then deletes the
    entry from the JSON tracking file on success. Always returns a
    human-readable status string (never raises).
    """
    try:
        deployed_models = load_deployed_models()

        # Only models recorded by this tool can be removed through it.
        if real_model_name not in deployed_models:
            return f"ā ļø Model '{real_model_name}' not found in tracking file."

        installed_model_name = deployed_models[real_model_name]

        # Remove the model using subprocess; Popen (not run) so stdout can be
        # relayed line-by-line while the removal is in progress.
        process = subprocess.Popen(
            ["ollama", "rm", installed_model_name],
            stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True
        )

        # Relay the child's stdout live until EOF.
        # NOTE(review): stderr is only drained after the process exits; if
        # ollama ever writes a large amount to stderr this could deadlock
        # while we block reading stdout — confirm acceptable.
        for line in iter(process.stdout.readline, ''):
            sys.stdout.write(line)
            sys.stdout.flush()

        process.wait()

        if process.returncode != 0:
            return f"ā Error removing model: {process.stderr.read().strip()}"

        # Remove from tracking list only after the on-disk removal succeeded.
        del deployed_models[real_model_name]
        save_deployed_models(deployed_models)

        return f"\nā Model '{real_model_name}' (stored as '{installed_model_name}') completely removed!"
    except Exception as e:
        # Broad catch keeps the CLI from crashing; the error is surfaced as text.
        return f"ā Error removing model: {e}"
|
88
|
+
|
89
|
+
def run_inference(model, prompt):
    """Send *prompt* to *model* via ``ollama run`` and return the reply text.

    Any failure (missing binary, non-zero exit) is reported as an error
    string rather than raised.
    """
    command = ["ollama", "run", model, prompt]
    try:
        completed = subprocess.run(command, capture_output=True, text=True)
    except Exception as exc:
        return f"ā Error running inference: {exc}"

    if completed.returncode != 0:
        return f"ā Error running inference: {completed.stderr}"

    return completed.stdout.strip()
|
@@ -0,0 +1,25 @@
|
|
1
|
+
from setuptools import setup, find_packages

setup(
    name="deploy-llm",
    version="0.0.3",
    packages=find_packages(),
    install_requires=[
        "click",
        "torch",
        "transformers"
    ],
    entry_points={
        "console_scripts": [
            # BUG FIX: the code lives in the ``llmdeploy`` package (see
            # top_level.txt / SOURCES.txt), so the entry point must target
            # ``llmdeploy.cli:cli``. The previous ``cli:cli`` referenced a
            # top-level ``cli`` module that is never installed, making the
            # console script fail with ModuleNotFoundError.
            "llmdeploy=llmdeploy.cli:cli",
        ],
    },
    author="Ankit Gupta",
    description="CLI tool for deploying and running models from Ollama and Hugging Face",
    classifiers=[
        "Programming Language :: Python :: 3",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
    ],
    python_requires=">=3.7",
)
|