lollms-client 0.33.0__py3-none-any.whl → 1.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the package versions as they appear in their public registries.
Potentially problematic release.
This version of lollms-client might be problematic.
- lollms_client/__init__.py +1 -1
- lollms_client/llm_bindings/azure_openai/__init__.py +6 -10
- lollms_client/llm_bindings/claude/__init__.py +4 -7
- lollms_client/llm_bindings/gemini/__init__.py +3 -7
- lollms_client/llm_bindings/grok/__init__.py +3 -7
- lollms_client/llm_bindings/groq/__init__.py +4 -6
- lollms_client/llm_bindings/hugging_face_inference_api/__init__.py +4 -6
- lollms_client/llm_bindings/litellm/__init__.py +15 -6
- lollms_client/llm_bindings/llamacpp/__init__.py +27 -9
- lollms_client/llm_bindings/lollms/__init__.py +24 -14
- lollms_client/llm_bindings/lollms_webui/__init__.py +6 -12
- lollms_client/llm_bindings/mistral/__init__.py +3 -5
- lollms_client/llm_bindings/ollama/__init__.py +6 -11
- lollms_client/llm_bindings/open_router/__init__.py +4 -6
- lollms_client/llm_bindings/openai/__init__.py +7 -14
- lollms_client/llm_bindings/openllm/__init__.py +12 -12
- lollms_client/llm_bindings/pythonllamacpp/__init__.py +1 -1
- lollms_client/llm_bindings/tensor_rt/__init__.py +8 -13
- lollms_client/llm_bindings/transformers/__init__.py +14 -6
- lollms_client/llm_bindings/vllm/__init__.py +16 -12
- lollms_client/lollms_core.py +303 -490
- lollms_client/lollms_discussion.py +431 -78
- lollms_client/lollms_llm_binding.py +192 -381
- lollms_client/lollms_mcp_binding.py +33 -2
- lollms_client/lollms_tti_binding.py +107 -2
- lollms_client/mcp_bindings/local_mcp/__init__.py +3 -2
- lollms_client/mcp_bindings/remote_mcp/__init__.py +6 -5
- lollms_client/mcp_bindings/standard_mcp/__init__.py +3 -5
- lollms_client/stt_bindings/lollms/__init__.py +6 -8
- lollms_client/stt_bindings/whisper/__init__.py +2 -4
- lollms_client/stt_bindings/whispercpp/__init__.py +15 -16
- lollms_client/tti_bindings/dalle/__init__.py +50 -29
- lollms_client/tti_bindings/diffusers/__init__.py +227 -439
- lollms_client/tti_bindings/gemini/__init__.py +320 -0
- lollms_client/tti_bindings/lollms/__init__.py +8 -9
- lollms_client-1.1.0.dist-info/METADATA +1214 -0
- lollms_client-1.1.0.dist-info/RECORD +69 -0
- {lollms_client-0.33.0.dist-info → lollms_client-1.1.0.dist-info}/top_level.txt +0 -2
- examples/article_summary/article_summary.py +0 -58
- examples/console_discussion/console_app.py +0 -266
- examples/console_discussion.py +0 -448
- examples/deep_analyze/deep_analyse.py +0 -30
- examples/deep_analyze/deep_analyze_multiple_files.py +0 -32
- examples/function_calling_with_local_custom_mcp.py +0 -250
- examples/generate_a_benchmark_for_safe_store.py +0 -89
- examples/generate_and_speak/generate_and_speak.py +0 -251
- examples/generate_game_sfx/generate_game_fx.py +0 -240
- examples/generate_text_with_multihop_rag_example.py +0 -210
- examples/gradio_chat_app.py +0 -228
- examples/gradio_lollms_chat.py +0 -259
- examples/internet_search_with_rag.py +0 -226
- examples/lollms_chat/calculator.py +0 -59
- examples/lollms_chat/derivative.py +0 -48
- examples/lollms_chat/test_openai_compatible_with_lollms_chat.py +0 -12
- examples/lollms_discussions_test.py +0 -155
- examples/mcp_examples/external_mcp.py +0 -267
- examples/mcp_examples/local_mcp.py +0 -171
- examples/mcp_examples/openai_mcp.py +0 -203
- examples/mcp_examples/run_remote_mcp_example_v2.py +0 -290
- examples/mcp_examples/run_standard_mcp_example.py +0 -204
- examples/simple_text_gen_test.py +0 -173
- examples/simple_text_gen_with_image_test.py +0 -178
- examples/test_local_models/local_chat.py +0 -9
- examples/text_2_audio.py +0 -77
- examples/text_2_image.py +0 -144
- examples/text_2_image_diffusers.py +0 -274
- examples/text_and_image_2_audio.py +0 -59
- examples/text_gen.py +0 -30
- examples/text_gen_system_prompt.py +0 -29
- lollms_client-0.33.0.dist-info/METADATA +0 -854
- lollms_client-0.33.0.dist-info/RECORD +0 -101
- test/test_lollms_discussion.py +0 -368
- {lollms_client-0.33.0.dist-info → lollms_client-1.1.0.dist-info}/WHEEL +0 -0
- {lollms_client-0.33.0.dist-info → lollms_client-1.1.0.dist-info}/licenses/LICENSE +0 -0
lollms_client/lollms_tti_binding.py

@@ -4,7 +4,7 @@ import importlib
 from pathlib import Path
 from typing import Optional, List, Dict, Any, Union
 from ascii_colors import trace_exception
-
+import yaml
 class LollmsTTIBinding(ABC):
     """Abstract base class for all LOLLMS Text-to-Image bindings."""
 
@@ -58,6 +58,7 @@ class LollmsTTIBinding(ABC):
         """
         pass
 
+
     @abstractmethod
     def get_settings(self, **kwargs) -> Optional[Dict[str, Any]]:
         """
@@ -73,6 +74,11 @@
         """
         pass
 
+    @abstractmethod
+    def listModels(self) -> list:
+        """Lists models"""
+        pass
+
     @abstractmethod
     def set_settings(self, settings: Dict[str, Any], **kwargs) -> bool:
         """
@@ -139,7 +145,86 @@ class LollmsTTIBindingManager:
             print(f"Failed to instantiate TTI binding {binding_name}: {str(e)}")
             return None
         return None
+
+    def _get_fallback_description(binding_name: str) -> Dict:
+        """
+        Generates a default description dictionary for a binding without a description.yaml file.
+        """
+        return {
+            "binding_name": binding_name,
+            "title": binding_name.replace("_", " ").title(),
+            "author": "Unknown",
+            "creation_date": "N/A",
+            "last_update_date": "N/A",
+            "description": f"A binding for {binding_name}. No description.yaml file was found, so common parameters are shown as a fallback.",
+            "input_parameters": [
+                {
+                    "name": "model_name",
+                    "type": "str",
+                    "description": "The model name, ID, or filename to be used.",
+                    "mandatory": False,
+                    "default": ""
+                },
+                {
+                    "name": "host_address",
+                    "type": "str",
+                    "description": "The host address of the service (for API-based bindings).",
+                    "mandatory": False,
+                    "default": ""
+                },
+                {
+                    "name": "models_path",
+                    "type": "str",
+                    "description": "The path to the models directory (for local bindings).",
+                    "mandatory": False,
+                    "default": ""
+                },
+                {
+                    "name": "service_key",
+                    "type": "str",
+                    "description": "The API key or service key for authentication (if applicable).",
+                    "mandatory": False,
+                    "default": ""
+                }
+            ]
+        }
 
+    @staticmethod
+    def get_bindings_list(llm_bindings_dir: Union[str, Path]) -> List[Dict]:
+        """
+        Lists all available LLM bindings by scanning a directory, loading their
+        description.yaml file if present, or providing a default description.
+
+        Args:
+            llm_bindings_dir (Union[str, Path]): The path to the directory containing LLM binding folders.
+
+        Returns:
+            List[Dict]: A list of dictionaries, each describing a binding.
+        """
+        bindings_dir = Path(llm_bindings_dir)
+        if not bindings_dir.is_dir():
+            return []
+
+        bindings_list = []
+        for binding_folder in bindings_dir.iterdir():
+            if binding_folder.is_dir() and (binding_folder / "__init__.py").exists():
+                binding_name = binding_folder.name
+                description_file = binding_folder / "description.yaml"
+
+                binding_info = {}
+                if description_file.exists():
+                    try:
+                        with open(description_file, 'r', encoding='utf-8') as f:
+                            binding_info = yaml.safe_load(f)
+                        binding_info['binding_name'] = binding_name
+                    except Exception as e:
+                        print(f"Error loading description.yaml for {binding_name}: {e}")
+                        binding_info = LollmsTTIBindingManager._get_fallback_description(binding_name)
+                else:
+                    binding_info = LollmsTTIBindingManager._get_fallback_description(binding_name)
+
+                bindings_list.append(binding_info)
+
+        return sorted(bindings_list, key=lambda b: b.get('title', b['binding_name']))
     def get_available_bindings(self) -> list[str]:
         """
         Return list of available TTI binding names based on subdirectories.
@@ -148,4 +233,24 @@
             list[str]: List of binding names.
         """
         return [binding_dir.name for binding_dir in self.tti_bindings_dir.iterdir()
-                if binding_dir.is_dir() and (binding_dir / "__init__.py").exists()]
+                if binding_dir.is_dir() and (binding_dir / "__init__.py").exists()]
+
+def get_available_bindings(tti_bindings_dir: Union[str, Path] = None) -> List[Dict]:
+    """
+    Lists all available LLM bindings with their detailed descriptions.
+
+    This function serves as a primary entry point for discovering what bindings
+    are available and how to configure them.
+
+    Args:
+        llm_bindings_dir (Union[str, Path], optional):
+            The path to the LLM bindings directory. If None, it defaults to the
+            'llm_bindings' subdirectory relative to this file.
+            Defaults to None.
+
+    Returns:
+        List[Dict]: A list of dictionaries, each describing a binding.
+    """
+    if tti_bindings_dir is None:
+        tti_bindings_dir = Path(__file__).parent / "tti_bindings"
+    return LollmsTTIBindingManager.get_bindings_list(tti_bindings_dir)
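Usage sketch (not part of the diff): the new module-level get_available_bindings() and the per-binding listModels() abstract method give callers a discovery API. A minimal example, assuming the import path below (inferred from the file list) and the default tti_bindings layout; the keys printed come from the fallback description, so description.yaml-backed entries may expose different fields.

# Sketch only: import path inferred from the file list above; output keys may vary.
from lollms_client.lollms_tti_binding import get_available_bindings

for binding in get_available_bindings():          # scans lollms_client/tti_bindings by default
    print(binding["binding_name"], "-", binding.get("title", ""))
    for param in binding.get("input_parameters", []):
        print("   ", param["name"], f"({param['type']})", "-", param["description"])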
lollms_client/mcp_bindings/local_mcp/__init__.py

@@ -21,15 +21,16 @@ class LocalMCPBinding(LollmsMCPBinding):
     """
 
     def __init__(self,
-
+                 **kwargs: Any
+                 ):
         """
         Initialize the LocalMCPBinding.
 
         Args:
-            binding_name (str): The name of this binding.
             tools_folder_path (str|Path) a folder where to find tools
         """
         super().__init__(binding_name="LocalMCP")
+        tools_folder_path = kwargs.get("tools_folder_path")
         if tools_folder_path:
             try:
                 self.tools_folder_path: Optional[Path] = Path(tools_folder_path)
lollms_client/mcp_bindings/remote_mcp/__init__.py

@@ -27,8 +27,8 @@ class RemoteMCPBinding(LollmsMCPBinding):
     Tools from all connected servers are aggregated and prefixed with the server's alias.
     """
     def __init__(self,
-
-
+                 **kwargs: Any
+                 ):
         """
         Initializes the binding to connect to multiple MCP servers.
 
@@ -41,10 +41,11 @@
                     "main_server": {"server_url": "http://localhost:8787", "auth_config": {}},
                     "experimental_server": {"server_url": "http://test.server:9000"}
                 }
-            **
+            **kwargs (Any): Additional configuration parameters.
         """
         super().__init__(binding_name="remote_mcp")
         # initialization in case no servers are present
+        servers_infos: Dict[str, Dict[str, Any]] = kwargs.get("servers_infos", {})
         self.servers = None
         if not MCP_LIBRARY_AVAILABLE:
             ASCIIColors.error(f"{self.binding_name}: MCP library not available. This binding will be disabled.")
@@ -56,8 +57,8 @@ class RemoteMCPBinding(LollmsMCPBinding):
 
         ### NEW: Store the overall configuration
         self.config = {
-            "servers_infos": servers_infos,
-            **
+            "servers_infos": kwargs.get("servers_infos"),
+            **kwargs
         }
 
         ### NEW: State management for multiple servers.
lollms_client/mcp_bindings/standard_mcp/__init__.py

@@ -48,12 +48,10 @@ class StandardMCPBinding(LollmsMCPBinding):
     """
 
     def __init__(self,
-
-                 **other_config_params: Any):
+                 **kwargs: Any):
         super().__init__(binding_name="standard_mcp")
-
-
-        self.config.update(other_config_params)
+        self.config = kwargs
+        initial_servers = kwargs.get("initial_servers", {})
 
         self._server_configs: Dict[str, Dict[str, Any]] = {}
         # Type hint with ClientSession, actual obj if MCP_LIBRARY_AVAILABLE
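Usage sketch (not part of the diff): all three MCP bindings above now take their configuration through **kwargs instead of named positional parameters. The kwarg names (tools_folder_path, servers_infos, initial_servers) come from the diff; the import paths are assumptions based on the package layout.

# Sketch only: constructing the MCP bindings with the new kwargs-style API.
from lollms_client.mcp_bindings.local_mcp import LocalMCPBinding
from lollms_client.mcp_bindings.remote_mcp import RemoteMCPBinding
from lollms_client.mcp_bindings.standard_mcp import StandardMCPBinding

local_mcp = LocalMCPBinding(tools_folder_path="my_tools")      # folder containing local tool definitions
remote_mcp = RemoteMCPBinding(servers_infos={
    "main_server": {"server_url": "http://localhost:8787", "auth_config": {}},
})
standard_mcp = StandardMCPBinding(initial_servers={})          # kwargs are stored verbatim in self.config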
lollms_client/stt_bindings/lollms/__init__.py

@@ -14,10 +14,8 @@ class LollmsSTTBinding_Impl(LollmsSTTBinding):
     """Concrete implementation of the LollmsSTTBinding for the standard LOLLMS server."""
 
     def __init__(self,
-
-
-                 service_key: Optional[str] = None,
-                 verify_ssl_certificate: bool = True):
+                 **kwargs
+                 ):
         """
         Initialize the LOLLMS STT binding.
 
@@ -28,10 +26,10 @@ class LollmsSTTBinding_Impl(LollmsSTTBinding):
             verify_ssl_certificate (bool): Whether to verify SSL certificates.
         """
         super().__init__("lollms")
-        self.host_address=host_address
-        self.model_name=model_name
-        self.service_key=service_key
-        self.verify_ssl_certificate=verify_ssl_certificate
+        self.host_address=kwargs.get("host_address")
+        self.model_name=kwargs.get("model_name")
+        self.service_key=kwargs.get("service_key")
+        self.verify_ssl_certificate=kwargs.get("verify_ssl_certificate")
 
     def transcribe_audio(self, audio_path: Union[str, Path], model: Optional[str] = None, **kwargs) -> str:
         """
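Usage sketch (not part of the diff): the server-backed STT binding now reads host_address, model_name, service_key and verify_ssl_certificate from kwargs. The import path and the parameter values below are assumptions.

# Sketch only: kwarg names mirror the kwargs.get(...) calls in the diff above.
from lollms_client.stt_bindings.lollms import LollmsSTTBinding_Impl

stt = LollmsSTTBinding_Impl(
    host_address="http://localhost:9600",    # hypothetical LoLLMs server address
    model_name="whisper",                    # hypothetical model identifier
    verify_ssl_certificate=True,
)
text = stt.transcribe_audio("recording.wav")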
lollms_client/stt_bindings/whisper/__init__.py

@@ -70,8 +70,6 @@ class WhisperSTTBinding(LollmsSTTBinding):
     WHISPER_MODEL_SIZES = ["tiny", "tiny.en", "base", "base.en", "small", "small.en", "medium", "medium.en", "large", "large-v1", "large-v2", "large-v3"]
 
     def __init__(self,
-                 model_name: str = "base", # Default Whisper model size
-                 device: Optional[str] = None, # "cpu", "cuda", "mps", or None for auto
                  **kwargs # To catch any other LollmsSTTBinding standard args
                  ):
         """
@@ -88,7 +86,7 @@ class WhisperSTTBinding(LollmsSTTBinding):
         if not _whisper_installed:
             raise ImportError(f"Whisper STT binding dependencies not met. Please ensure 'openai-whisper' and 'torch' are installed. Error: {_whisper_installation_error}")
 
-        self.device = device
+        self.device = kwargs.get("device",None)
         if self.device is None: # Auto-detect if not specified
             if torch.cuda.is_available():
                 self.device = "cuda"
@@ -101,7 +99,7 @@ class WhisperSTTBinding(LollmsSTTBinding):
 
         self.loaded_model_name = None
         self.model = None
-        self._load_whisper_model(model_name)
+        self._load_whisper_model(kwargs.get("model_name", "base")) # Default to "base" if not specified
 
 
     def _load_whisper_model(self, model_name_to_load: str):
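Usage sketch (not part of the diff): the local Whisper binding now reads model_name and device from kwargs while keeping the previous defaults ("base", auto-detected device). Import path assumed; requires openai-whisper and torch.

# Sketch only: omitting both kwargs reproduces the previous defaults.
from lollms_client.stt_bindings.whisper import WhisperSTTBinding

stt = WhisperSTTBinding(model_name="base", device=None)   # device=None -> auto-detect (e.g. cuda if available)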
lollms_client/stt_bindings/whispercpp/__init__.py

@@ -18,20 +18,19 @@ DEFAULT_WHISPERCPP_EXE_NAMES = ["main", "whisper-cli", "whisper"] # Common names
 
 class WhisperCppSTTBinding(LollmsSTTBinding):
     def __init__(self,
-                 model_path: Union[str, Path], # Path to the GGUF Whisper model
-                 whispercpp_exe_path: Optional[Union[str, Path]] = None, # Path to whisper.cpp executable
-                 ffmpeg_path: Optional[Union[str, Path]] = None, # Path to ffmpeg executable (if not in PATH)
-                 models_search_path: Optional[Union[str, Path]] = None, # Optional dir to scan for more models
-                 default_language: str = "auto",
-                 n_threads: int = 4,
-                 # Catch LollmsSTTBinding standard args even if not directly used by this local binding
-                 host_address: Optional[str] = None, # Not used for local binding
-                 service_key: Optional[str] = None, # Not used for local binding
-                 verify_ssl_certificate: bool = True, # Not used for local binding
                  **kwargs): # Catch-all for future compatibility or specific whisper.cpp params
 
-        super().__init__(binding_name="whispercpp")
-
+        super().__init__(binding_name="whispercpp")
+
+        # --- Extract values from kwargs with defaults ---
+        model_path = kwargs.get("model_path")
+        whispercpp_exe_path = kwargs.get("whispercpp_exe_path")
+        ffmpeg_path = kwargs.get("ffmpeg_path")
+        models_search_path = kwargs.get("models_search_path")
+        default_language = kwargs.get("default_language", "auto")
+        n_threads = kwargs.get("n_threads", 4)
+        extra_whisper_args = kwargs.get("extra_whisper_args", []) # e.g. ["--no-timestamps"]
+
         # --- Validate FFMPEG ---
         self.ffmpeg_exe = None
         if ffmpeg_path:
@@ -42,7 +41,7 @@ class WhisperCppSTTBinding(LollmsSTTBinding):
                 raise FileNotFoundError(f"Provided ffmpeg_path '{ffmpeg_path}' not found or not executable.")
         else:
             self.ffmpeg_exe = shutil.which("ffmpeg")
-
+
         if not self.ffmpeg_exe:
             ASCIIColors.warning("ffmpeg not found in PATH or explicitly provided. Audio conversion will not be possible for non-WAV files or incompatible WAV files.")
             ASCIIColors.warning("Please install ffmpeg and ensure it's in your system's PATH, or provide ffmpeg_path argument.")
@@ -63,7 +62,7 @@ class WhisperCppSTTBinding(LollmsSTTBinding):
                 self.whispercpp_exe = found_path
                 ASCIIColors.info(f"Found whisper.cpp executable via PATH: {self.whispercpp_exe}")
                 break
-
+
         if not self.whispercpp_exe:
             raise FileNotFoundError(
                 f"Whisper.cpp executable (tried: {', '.join(DEFAULT_WHISPERCPP_EXE_NAMES)}) not found in PATH or explicitly provided. "
@@ -79,11 +78,11 @@ class WhisperCppSTTBinding(LollmsSTTBinding):
                 self.model_path = Path(models_search_path, self.model_path).resolve()
             else:
                 raise FileNotFoundError(f"Whisper GGUF model file not found at '{self.model_path}'. Also checked in models_search_path if applicable.")
-
+
         self.models_search_path = Path(models_search_path).resolve() if models_search_path else None
         self.default_language = default_language
         self.n_threads = n_threads
-        self.extra_whisper_args =
+        self.extra_whisper_args = extra_whisper_args
 
         ASCIIColors.green(f"WhisperCppSTTBinding initialized with model: {self.model_path}")
 
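Usage sketch (not part of the diff): the whisper.cpp binding now pulls every former constructor parameter out of kwargs, including extra_whisper_args. The paths below are placeholders and the import path is an assumption.

# Sketch only: kwarg names come from the kwargs.get(...) calls in the diff above.
from lollms_client.stt_bindings.whispercpp import WhisperCppSTTBinding

stt = WhisperCppSTTBinding(
    model_path="models/ggml-base.en.bin",              # placeholder GGUF model path
    whispercpp_exe_path="/usr/local/bin/whisper-cli",  # placeholder executable path
    default_language="auto",
    n_threads=4,
    extra_whisper_args=["--no-timestamps"],            # example taken from the inline comment above
)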
lollms_client/tti_bindings/dalle/__init__.py

@@ -35,22 +35,11 @@ DALLE_MODELS = {
         "max_prompt_length": 4000 # Characters
     }
 }
-
 class DalleTTIBinding_Impl(LollmsTTIBinding):
     """
     Concrete implementation of LollmsTTIBinding for OpenAI's DALL-E API.
     """
-
-    def __init__(self,
-                 api_key: Optional[str] = None, # Can be None to check env var
-                 model_name: str = "dall-e-3", # Default to DALL-E 3
-                 default_size: Optional[str] = None, # e.g. "1024x1024"
-                 default_quality: Optional[str] = None, # "standard" or "hd" (DALL-E 3)
-                 default_style: Optional[str] = None, # "vivid" or "natural" (DALL-E 3)
-                 host_address: str = DALLE_API_HOST, # OpenAI API host
-                 verify_ssl_certificate: bool = True,
-                 **kwargs # To catch any other lollms_client specific params like service_key/client_id
-                 ):
+    def __init__(self, **kwargs):
         """
         Initialize the DALL-E TTI binding.
 
@@ -70,44 +59,58 @@
         """
         super().__init__(binding_name="dalle")
 
-
+        # Extract parameters from kwargs, providing defaults
+        self.api_key = kwargs.get("service_key")
+        self.model_name = kwargs.get("model_name")
+        self.default_size = kwargs.get("default_size")
+        self.default_quality = kwargs.get("default_quality")
+        self.default_style = kwargs.get("default_style")
+        self.host_address = kwargs.get("host_address", DALLE_API_HOST) # Provide default
+        self.verify_ssl_certificate = kwargs.get("verify_ssl_certificate", True) # Provide default
+
+        # Resolve API key from kwargs or environment variable
+        resolved_api_key = self.api_key
         if not resolved_api_key:
             ASCIIColors.info(f"API key not provided directly, checking environment variable '{OPENAI_API_KEY_ENV_VAR}'...")
             resolved_api_key = os.environ.get(OPENAI_API_KEY_ENV_VAR)
 
         if not resolved_api_key:
             raise ValueError(f"OpenAI API key is required. Provide it directly or set the '{OPENAI_API_KEY_ENV_VAR}' environment variable.")
-
+
         self.api_key = resolved_api_key
-        self.host_address = host_address
-        self.verify_ssl_certificate = verify_ssl_certificate
 
-
-
-
-
+        # Model name validation
+        if not self.model_name:
+            ASCIIColors.warning("Model name is required.")
+        if self.model_name not in DALLE_MODELS:
+            ASCIIColors.warning(f"Unsupported DALL-E model: {self.model_name}. Supported models: {list(DALLE_MODELS.keys())}")
+            self.model_name = list(DALLE_MODELS.keys())[1]
+            ASCIIColors.warning(f"Defaulting to {self.model_name}")
+
         model_props = DALLE_MODELS[self.model_name]
 
-        #
-        self.current_size = default_size or model_props["default_size"]
+        # Size
+        self.current_size = self.default_size or model_props["default_size"]
         if self.current_size not in model_props["sizes"]:
             raise ValueError(f"Unsupported size '{self.current_size}' for model '{self.model_name}'. Supported sizes: {model_props['sizes']}")
 
+        # Quality
         if model_props["supports_quality"]:
-            self.current_quality = default_quality or model_props["default_quality"]
+            self.current_quality = self.default_quality or model_props["default_quality"]
             if self.current_quality not in model_props["qualities"]:
                 raise ValueError(f"Unsupported quality '{self.current_quality}' for model '{self.model_name}'. Supported qualities: {model_props['qualities']}")
         else:
-            self.current_quality = None
+            self.current_quality = None # Explicitly None if not supported
 
+        # Style
         if model_props["supports_style"]:
-            self.current_style = default_style or model_props["default_style"]
+            self.current_style = self.default_style or model_props["default_style"]
             if self.current_style not in model_props["styles"]:
                 raise ValueError(f"Unsupported style '{self.current_style}' for model '{self.model_name}'. Supported styles: {model_props['styles']}")
         else:
-            self.current_style = None
-
-        #
+            self.current_style = None # Explicitly None if not supported
+
+        # Client ID
         self.client_id = kwargs.get("service_key", kwargs.get("client_id", "dalle_client_user"))
 
 
@@ -430,4 +433,22 @@ class DalleTTIBinding_Impl(LollmsTTIBinding):
         except Exception as e:
             trace_exception(e)
             ASCIIColors.error(f"Failed to apply settings due to an unexpected error: {e}")
-            return False
+            return False
+
+    def listModels(self) -> list:
+        """Lists models"""
+        formatted_models=[
+            {
+                'model_name': "dall-e-2",
+                'display_name': "Dall-e 2",
+                'description': "Dalle 2 model",
+                'owned_by': 'openai'
+            },
+            {
+                'model_name': "dall-e-3",
+                'display_name': "Dall-e 3",
+                'description': "Dalle 3 model",
+                'owned_by': 'openai'
+            }
+        ]
+        return formatted_models