lollms-client 1.5.1__tar.gz → 1.5.2__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {lollms_client-1.5.1/src/lollms_client.egg-info → lollms_client-1.5.2}/PKG-INFO +1 -1
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/__init__.py +1 -1
- lollms_client-1.5.2/src/lollms_client/llm_bindings/openwebui/__init__.py +303 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2/src/lollms_client.egg-info}/PKG-INFO +1 -1
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client.egg-info/SOURCES.txt +1 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/LICENSE +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/README.md +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/pyproject.toml +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/setup.cfg +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/assets/models_ctx_sizes.json +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/llm_bindings/__init__.py +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/llm_bindings/azure_openai/__init__.py +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/llm_bindings/claude/__init__.py +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/llm_bindings/gemini/__init__.py +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/llm_bindings/grok/__init__.py +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/llm_bindings/groq/__init__.py +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/llm_bindings/hugging_face_inference_api/__init__.py +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/llm_bindings/litellm/__init__.py +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/llm_bindings/llamacpp/__init__.py +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/llm_bindings/lollms/__init__.py +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/llm_bindings/lollms_webui/__init__.py +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/llm_bindings/mistral/__init__.py +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/llm_bindings/novita_ai/__init__.py +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/llm_bindings/ollama/__init__.py +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/llm_bindings/open_router/__init__.py +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/llm_bindings/openai/__init__.py +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/llm_bindings/openllm/__init__.py +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/llm_bindings/perplexity/__init__.py +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/llm_bindings/pythonllamacpp/__init__.py +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/llm_bindings/tensor_rt/__init__.py +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/llm_bindings/transformers/__init__.py +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/llm_bindings/vllm/__init__.py +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/lollms_agentic.py +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/lollms_config.py +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/lollms_core.py +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/lollms_discussion.py +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/lollms_js_analyzer.py +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/lollms_llm_binding.py +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/lollms_mcp_binding.py +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/lollms_mcp_security.py +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/lollms_personality.py +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/lollms_python_analyzer.py +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/lollms_stt_binding.py +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/lollms_tti_binding.py +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/lollms_ttm_binding.py +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/lollms_tts_binding.py +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/lollms_ttv_binding.py +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/lollms_types.py +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/lollms_utilities.py +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/mcp_bindings/local_mcp/__init__.py +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/mcp_bindings/local_mcp/default_tools/file_writer/file_writer.py +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/mcp_bindings/local_mcp/default_tools/generate_image_from_prompt/generate_image_from_prompt.py +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/mcp_bindings/local_mcp/default_tools/internet_search/internet_search.py +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/mcp_bindings/local_mcp/default_tools/python_interpreter/python_interpreter.py +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/mcp_bindings/remote_mcp/__init__.py +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/mcp_bindings/standard_mcp/__init__.py +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/stt_bindings/__init__.py +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/stt_bindings/lollms/__init__.py +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/stt_bindings/whisper/__init__.py +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/stt_bindings/whispercpp/__init__.py +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/tti_bindings/__init__.py +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/tti_bindings/diffusers/__init__.py +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/tti_bindings/gemini/__init__.py +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/tti_bindings/leonardo_ai/__init__.py +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/tti_bindings/lollms/__init__.py +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/tti_bindings/novita_ai/__init__.py +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/tti_bindings/openai/__init__.py +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/tti_bindings/stability_ai/__init__.py +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/ttm_bindings/__init__.py +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/ttm_bindings/audiocraft/__init__.py +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/ttm_bindings/beatoven_ai/__init__.py +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/ttm_bindings/lollms/__init__.py +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/ttm_bindings/replicate/__init__.py +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/ttm_bindings/stability_ai/__init__.py +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/ttm_bindings/topmediai/__init__.py +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/tts_bindings/__init__.py +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/tts_bindings/bark/__init__.py +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/tts_bindings/bark/server/install_bark.py +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/tts_bindings/bark/server/main.py +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/tts_bindings/lollms/__init__.py +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/tts_bindings/piper_tts/__init__.py +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/tts_bindings/piper_tts/server/install_piper.py +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/tts_bindings/piper_tts/server/main.py +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/tts_bindings/piper_tts/server/setup_voices.py +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/tts_bindings/xtts/__init__.py +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/tts_bindings/xtts/server/main.py +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/tts_bindings/xtts/server/setup_voices.py +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/ttv_bindings/__init__.py +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/ttv_bindings/lollms/__init__.py +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client.egg-info/dependency_links.txt +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client.egg-info/requires.txt +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client.egg-info/top_level.txt +0 -0
- {lollms_client-1.5.1 → lollms_client-1.5.2}/test/test_lollms_discussion.py +0 -0
{lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client/__init__.py CHANGED

@@ -8,7 +8,7 @@ from lollms_client.lollms_utilities import PromptReshaper # Keep general utilities
 from lollms_client.lollms_mcp_binding import LollmsMCPBinding, LollmsMCPBindingManager
 from lollms_client.lollms_llm_binding import LollmsLLMBindingManager
 
-__version__ = "1.5.1"
+__version__ = "1.5.2" # Updated version
 
 # Optionally, you could define __all__ if you want to be explicit about exports
 __all__ = [
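A quick way to confirm which release is installed, assuming the package is importable as `lollms_client` (as the imports in this diff show):

```python
import lollms_client

# Should print "1.5.2" once the new release is installed.
print(lollms_client.__version__)
```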
lollms_client-1.5.2/src/lollms_client/llm_bindings/openwebui/__init__.py ADDED

@@ -0,0 +1,303 @@
+import requests
+import json
+import base64
+import os
+import mimetypes
+import math
+from typing import Optional, Callable, List, Union, Dict
+
+import httpx
+import tiktoken
+import pipmaster as pm
+
+from lollms_client.lollms_llm_binding import LollmsLLMBinding
+from lollms_client.lollms_types import MSG_TYPE, ELF_COMPLETION_FORMAT
+from lollms_client.lollms_discussion import LollmsDiscussion
+from lollms_client.lollms_utilities import encode_image
+from ascii_colors import ASCIIColors, trace_exception
+
+# Ensure required packages are installed
+pm.ensure_packages(["httpx", "tiktoken"])
+
+BindingName = "OpenWebUIBinding"
+
+
+def _read_file_as_base64(path):
+    with open(path, "rb") as f:
+        return base64.b64encode(f.read()).decode("utf-8")
+
+def _extract_markdown_path(s):
+    s = s.strip()
+    if s.startswith("[") and s.endswith(")"):
+        lb, rb = s.find("["), s.find("]")
+        if lb != -1 and rb != -1 and rb > lb:
+            return s[lb+1:rb].strip()
+    return s
+
+def _guess_mime_from_name(name, default="image/jpeg"):
+    mime, _ = mimetypes.guess_type(name)
+    return mime or default
+
+def _to_data_url(b64_str, mime):
+    return f"data:{mime};base64,{b64_str}"
+
+def normalize_image_input(img, default_mime="image/jpeg"):
+    """
+    Returns an OpenAI API-ready content block for an image.
+    Accepts various input formats and converts them to a data URL.
+    """
+    if isinstance(img, str):
+        # Handle path-like strings or raw base64
+        s = _extract_markdown_path(img)
+        if os.path.exists(s):
+            b64 = _read_file_as_base64(s)
+            mime = _guess_mime_from_name(s, default_mime)
+            url = _to_data_url(b64, mime)
+        else:  # Assume it's a base64 string
+            url = _to_data_url(s, default_mime)
+        return {"type": "image_url", "image_url": {"url": url}}
+
+    raise ValueError("Unsupported image input type for OpenWebUI")
+
+
+class OpenWebUIBinding(LollmsLLMBinding):
+    """OpenWebUI-specific binding implementation"""
+
+    def __init__(self, **kwargs):
+        """
+        Initialize the OpenWebUI binding.
+
+        Args:
+            host_address (str): The URL of the OpenWebUI server (e.g., "http://localhost:8080").
+            model_name (str): Name of the model to use.
+            service_key (str): Authentication token for the service.
+            verify_ssl_certificate (bool): Whether to verify SSL certificates.
+        """
+        super().__init__(BindingName, **kwargs)
+        self.host_address = kwargs.get("host_address")
+        self.model_name = kwargs.get("model_name")
+        self.service_key = kwargs.get("service_key", os.getenv("OPENWEBUI_API_KEY"))
+        self.verify_ssl_certificate = kwargs.get("verify_ssl_certificate", True)
+
+        if not self.host_address:
+            raise ValueError("OpenWebUI host address is required.")
+        if not self.service_key:
+            ASCIIColors.warning("No service key provided for OpenWebUI. Requests may fail.")
+
+        headers = {
+            "Authorization": f"Bearer {self.service_key}",
+            "Content-Type": "application/json"
+        }
+
+        self.client = httpx.Client(
+            base_url=self.host_address,
+            headers=headers,
+            verify=self.verify_ssl_certificate,
+            timeout=None
+        )
+
+    def _build_request_params(self, messages: list, **kwargs) -> dict:
+        """Builds the request parameters for the OpenWebUI API."""
+        params = {
+            "model": kwargs.get("model", self.model_name),
+            "messages": messages,
+            "stream": kwargs.get("stream", True),
+        }
+
+        # Map Lollms parameters to OpenAI-compatible parameters
+        if "n_predict" in kwargs and kwargs["n_predict"] is not None:
+            params["max_tokens"] = kwargs["n_predict"]
+        if "temperature" in kwargs and kwargs["temperature"] is not None:
+            params["temperature"] = kwargs["temperature"]
+        if "top_p" in kwargs and kwargs["top_p"] is not None:
+            params["top_p"] = kwargs["top_p"]
+        if "top_k" in kwargs and kwargs["top_k"] is not None:
+            # Note: top_k is not standard in OpenAI API, but some backends might support it.
+            # We include it here for potential compatibility.
+            params["top_k"] = kwargs["top_k"]
+        if "repeat_penalty" in kwargs and kwargs["repeat_penalty"] is not None:
+            params["frequency_penalty"] = kwargs["repeat_penalty"]
+        if "seed" in kwargs and kwargs["seed"] is not None:
+            params["seed"] = kwargs["seed"]
+
+        return params
+
+    def generate_text(self,
+                      prompt: str,
+                      images: Optional[List[str]] = None,
+                      system_prompt: str = "",
+                      n_predict: Optional[int] = None,
+                      stream: Optional[bool] = None,
+                      temperature: float = 0.7,
+                      top_k: int = 40,
+                      top_p: float = 0.9,
+                      repeat_penalty: float = 1.1,
+                      streaming_callback: Optional[Callable[[str, MSG_TYPE], None]] = None,
+                      **kwargs
+                      ) -> Union[str, dict]:
+
+        messages = []
+        if system_prompt:
+            messages.append({"role": "system", "content": system_prompt})
+
+        user_content = [{"type": "text", "text": prompt}]
+        if images:
+            for img in images:
+                user_content.append(normalize_image_input(img))
+
+        messages.append({"role": "user", "content": user_content})
+
+        params = self._build_request_params(
+            messages=messages,
+            n_predict=n_predict,
+            stream=stream,
+            temperature=temperature,
+            top_k=top_k,
+            top_p=top_p,
+            repeat_penalty=repeat_penalty,
+            **kwargs
+        )
+
+        return self._process_request(params, stream, streaming_callback)
+
+
+    def generate_from_messages(self,
+                               messages: List[Dict],
+                               n_predict: Optional[int] = None,
+                               stream: Optional[bool] = None,
+                               temperature: Optional[float] = None,
+                               top_k: Optional[int] = None,
+                               top_p: Optional[float] = None,
+                               repeat_penalty: Optional[float] = None,
+                               streaming_callback: Optional[Callable[[str, MSG_TYPE], None]] = None,
+                               **kwargs
+                               ) -> Union[str, dict]:
+
+        params = self._build_request_params(
+            messages=messages,
+            n_predict=n_predict,
+            stream=stream,
+            temperature=temperature,
+            top_k=top_k,
+            top_p=top_p,
+            repeat_penalty=repeat_penalty,
+            **kwargs
+        )
+
+        return self._process_request(params, stream, streaming_callback)
+
+    def _process_request(self, params, stream, streaming_callback):
+        """Helper to process streaming or non-streaming API calls."""
+        output = ""
+        try:
+            if stream:
+                with self.client.stream("POST", "/api/chat/completions", json=params) as response:
+                    if response.status_code != 200:
+                        error_content = response.read().decode('utf-8')
+                        raise Exception(f"API Error: {response.status_code} - {error_content}")
+
+                    for line in response.iter_lines():
+                        if line.startswith("data:"):
+                            data_str = line[len("data:"):].strip()
+                            if data_str == "[DONE]":
+                                break
+                            try:
+                                chunk = json.loads(data_str)
+                                if chunk["choices"]:
+                                    delta = chunk["choices"][0].get("delta", {})
+                                    word = delta.get("content", "")
+                                    if word:
+                                        if streaming_callback:
+                                            if not streaming_callback(word, MSG_TYPE.MSG_TYPE_CHUNK):
+                                                break
+                                        output += word
+                            except json.JSONDecodeError:
+                                continue  # Ignore malformed SSE lines
+            else:
+                response = self.client.post("/api/chat/completions", json=params)
+                if response.status_code != 200:
+                    raise Exception(f"API Error: {response.status_code} - {response.text}")
+
+                data = response.json()
+                output = data["choices"][0]["message"]["content"]
+                if streaming_callback:
+                    streaming_callback(output, MSG_TYPE.MSG_TYPE_CHUNK)
+
+        except Exception as e:
+            trace_exception(e)
+            err_msg = f"An error occurred with the OpenWebUI API: {e}"
+            if streaming_callback:
+                streaming_callback(err_msg, MSG_TYPE.MSG_TYPE_EXCEPTION)
+            return {"status": "error", "message": err_msg}
+
+        return output
+
+    def listModels(self) -> List[Dict]:
+        models_info = []
+        try:
+            response = self.client.get("/api/models")
+            response.raise_for_status()
+            models_data = response.json().get("data", [])
+
+            for model in models_data:
+                models_info.append({
+                    "model_name": model.get("id", "N/A"),
+                    "owned_by": model.get("details", {}).get("family", "N/A"),
+                    "created": model.get("modified_at", "N/A"),
+                    # Assuming context length might be in details, though not guaranteed
+                    "context_length": model.get("details", {}).get("parameter_size", "unknown"),
+                })
+        except Exception as e:
+            ASCIIColors.error(f"Failed to list models from OpenWebUI: {e}")
+        return models_info
+
+    def _get_encoding(self, model_name: str | None = None):
+        """Uses tiktoken as a general-purpose tokenizer."""
+        try:
+            return tiktoken.encoding_for_model(model_name or self.model_name)
+        except KeyError:
+            return tiktoken.get_encoding("cl100k_base")
+
+    def tokenize(self, text: str) -> list[int]:
+        encoding = self._get_encoding()
+        return encoding.encode(text)
+
+    def detokenize(self, tokens: list[int]) -> str:
+        encoding = self._get_encoding()
+        return encoding.decode(tokens)
+
+    def count_tokens(self, text: str) -> int:
+        return len(self.tokenize(text))
+
+    def get_input_tokens_price(self, model_name: str | None = None) -> float:
+        return 0.0
+
+    def get_output_tokens_price(self, model_name: str | None = None) -> float:
+        return 0.0
+
+    def embed(self, text: str | list[str], **kwargs) -> list:
+        """Get embeddings using Ollama's passthrough endpoint."""
+        embedding_model = kwargs.get("model", self.model_name)
+        is_single_input = isinstance(text, str)
+        input_texts = [text] if is_single_input else text
+        embeddings = []
+
+        try:
+            for t in input_texts:
+                payload = {"model": embedding_model, "prompt": t}
+                response = self.client.post("/ollama/api/embeddings", json=payload)
+                response.raise_for_status()
+                embedding_data = response.json().get("embedding")
+                if embedding_data:
+                    embeddings.append(embedding_data)
+
+            return embeddings[0] if is_single_input and embeddings else embeddings
+
+        except Exception as e:
+            ASCIIColors.error(f"Failed to generate embeddings using model '{embedding_model}': {e}")
+            trace_exception(e)
+            return []
+
+    def load_model(self, model_name: str) -> bool:
+        self.model_name = model_name
+        return True
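For orientation, here is a minimal, hypothetical usage sketch of the binding added above, based only on the code in this diff. The host URL, model id, and API key are placeholders, and the binding is instantiated directly rather than through `LollmsLLMBindingManager`:

```python
# Hypothetical usage sketch of the new OpenWebUI binding (all values are placeholders).
from lollms_client.llm_bindings.openwebui import OpenWebUIBinding
from lollms_client.lollms_types import MSG_TYPE

binding = OpenWebUIBinding(
    host_address="http://localhost:8080",  # placeholder OpenWebUI server URL
    model_name="llama3.1:8b",              # placeholder id; pick one from binding.listModels()
    service_key="sk-...",                  # or set OPENWEBUI_API_KEY in the environment
)

def on_chunk(chunk: str, msg_type: MSG_TYPE) -> bool:
    # Print streamed tokens as they arrive; returning False stops the stream.
    print(chunk, end="", flush=True)
    return True

result = binding.generate_text(
    "Say hello in one short sentence.",
    stream=True,
    streaming_callback=on_chunk,
)
```

Note that token counting is done locally with tiktoken, and `embed()` presumably requires an Ollama backend behind OpenWebUI, since it calls the `/ollama/api/embeddings` passthrough endpoint.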
{lollms_client-1.5.1 → lollms_client-1.5.2}/src/lollms_client.egg-info/SOURCES.txt CHANGED

@@ -42,6 +42,7 @@ src/lollms_client/llm_bindings/ollama/__init__.py
 src/lollms_client/llm_bindings/open_router/__init__.py
 src/lollms_client/llm_bindings/openai/__init__.py
 src/lollms_client/llm_bindings/openllm/__init__.py
+src/lollms_client/llm_bindings/openwebui/__init__.py
 src/lollms_client/llm_bindings/perplexity/__init__.py
 src/lollms_client/llm_bindings/pythonllamacpp/__init__.py
 src/lollms_client/llm_bindings/tensor_rt/__init__.py