lollms-client 0.33.0__py3-none-any.whl → 1.0.0__py3-none-any.whl
This diff shows the changes between publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
Potentially problematic release. This version of lollms-client might be problematic; more details are available on the package's registry page.
- lollms_client/__init__.py +1 -1
- lollms_client/llm_bindings/azure_openai/__init__.py +6 -10
- lollms_client/llm_bindings/claude/__init__.py +4 -7
- lollms_client/llm_bindings/gemini/__init__.py +3 -7
- lollms_client/llm_bindings/grok/__init__.py +3 -7
- lollms_client/llm_bindings/groq/__init__.py +4 -6
- lollms_client/llm_bindings/hugging_face_inference_api/__init__.py +4 -6
- lollms_client/llm_bindings/litellm/__init__.py +15 -6
- lollms_client/llm_bindings/llamacpp/__init__.py +27 -9
- lollms_client/llm_bindings/lollms/__init__.py +24 -14
- lollms_client/llm_bindings/lollms_webui/__init__.py +6 -12
- lollms_client/llm_bindings/mistral/__init__.py +3 -5
- lollms_client/llm_bindings/ollama/__init__.py +6 -11
- lollms_client/llm_bindings/open_router/__init__.py +4 -6
- lollms_client/llm_bindings/openai/__init__.py +7 -14
- lollms_client/llm_bindings/openllm/__init__.py +12 -12
- lollms_client/llm_bindings/pythonllamacpp/__init__.py +1 -1
- lollms_client/llm_bindings/tensor_rt/__init__.py +8 -13
- lollms_client/llm_bindings/transformers/__init__.py +14 -6
- lollms_client/llm_bindings/vllm/__init__.py +16 -12
- lollms_client/lollms_core.py +296 -487
- lollms_client/lollms_discussion.py +431 -78
- lollms_client/lollms_llm_binding.py +191 -380
- lollms_client/lollms_mcp_binding.py +33 -2
- lollms_client/mcp_bindings/local_mcp/__init__.py +3 -2
- lollms_client/mcp_bindings/remote_mcp/__init__.py +6 -5
- lollms_client/mcp_bindings/standard_mcp/__init__.py +3 -5
- lollms_client/stt_bindings/lollms/__init__.py +6 -8
- lollms_client/stt_bindings/whisper/__init__.py +2 -4
- lollms_client/stt_bindings/whispercpp/__init__.py +15 -16
- lollms_client/tti_bindings/dalle/__init__.py +29 -28
- lollms_client/tti_bindings/diffusers/__init__.py +25 -21
- lollms_client/tti_bindings/gemini/__init__.py +215 -0
- lollms_client/tti_bindings/lollms/__init__.py +8 -9
- lollms_client-1.0.0.dist-info/METADATA +1214 -0
- lollms_client-1.0.0.dist-info/RECORD +69 -0
- {lollms_client-0.33.0.dist-info → lollms_client-1.0.0.dist-info}/top_level.txt +0 -2
- examples/article_summary/article_summary.py +0 -58
- examples/console_discussion/console_app.py +0 -266
- examples/console_discussion.py +0 -448
- examples/deep_analyze/deep_analyse.py +0 -30
- examples/deep_analyze/deep_analyze_multiple_files.py +0 -32
- examples/function_calling_with_local_custom_mcp.py +0 -250
- examples/generate_a_benchmark_for_safe_store.py +0 -89
- examples/generate_and_speak/generate_and_speak.py +0 -251
- examples/generate_game_sfx/generate_game_fx.py +0 -240
- examples/generate_text_with_multihop_rag_example.py +0 -210
- examples/gradio_chat_app.py +0 -228
- examples/gradio_lollms_chat.py +0 -259
- examples/internet_search_with_rag.py +0 -226
- examples/lollms_chat/calculator.py +0 -59
- examples/lollms_chat/derivative.py +0 -48
- examples/lollms_chat/test_openai_compatible_with_lollms_chat.py +0 -12
- examples/lollms_discussions_test.py +0 -155
- examples/mcp_examples/external_mcp.py +0 -267
- examples/mcp_examples/local_mcp.py +0 -171
- examples/mcp_examples/openai_mcp.py +0 -203
- examples/mcp_examples/run_remote_mcp_example_v2.py +0 -290
- examples/mcp_examples/run_standard_mcp_example.py +0 -204
- examples/simple_text_gen_test.py +0 -173
- examples/simple_text_gen_with_image_test.py +0 -178
- examples/test_local_models/local_chat.py +0 -9
- examples/text_2_audio.py +0 -77
- examples/text_2_image.py +0 -144
- examples/text_2_image_diffusers.py +0 -274
- examples/text_and_image_2_audio.py +0 -59
- examples/text_gen.py +0 -30
- examples/text_gen_system_prompt.py +0 -29
- lollms_client-0.33.0.dist-info/METADATA +0 -854
- lollms_client-0.33.0.dist-info/RECORD +0 -101
- test/test_lollms_discussion.py +0 -368
- {lollms_client-0.33.0.dist-info → lollms_client-1.0.0.dist-info}/WHEEL +0 -0
- {lollms_client-0.33.0.dist-info → lollms_client-1.0.0.dist-info}/licenses/LICENSE +0 -0
@@ -232,24 +232,28 @@ def resolve_hf_model_path(model_id_or_gguf_id: str, models_base_path: Path) -> P
 # --- VLLM Binding Class ---
 class VLLMBinding(LollmsLLMBinding):
     def __init__(self,
-                 models_folder: Optional[Union[str, Path]] = None,
-                 model_name: str = "",
-                 service_key: Optional[str] = None,
-                 verify_ssl_certificate: bool = True,
-                 default_completion_format: ELF_COMPLETION_FORMAT = ELF_COMPLETION_FORMAT.Chat,
                  **kwargs
                  ):
+        """Initializes the VLLM binding with the specified model and configuration.
+
+        Args:
+            models_folder (Optional[Union[str, Path]]): Path to the directory containing local models.
+            model_name (str): Name or ID of the model to load. Can be a Hugging Face Hub ID or local folder name.
+            service_key (Optional[str]): Service key for authentication (not used in this binding).
+            verify_ssl_certificate (bool): Whether to verify SSL certificates (not used in this binding).
+            default_completion_format (ELF_COMPLETION_FORMAT): Default format for text generation.
+        """
         if not _vllm_deps_installed:
             raise ImportError(f"vLLM or its dependencies not installed. Binding unusable. Error: {_vllm_installation_error}")
         if engine_manager is None:
             raise RuntimeError("VLLMEngineManager failed to initialize. Binding unusable.")
-
+        models_folder = kwargs.get("models_folder")
+        default_completion_format = kwargs.get("default_completion_format", ELF_COMPLETION_FORMAT.Chat)
         _models_folder = Path(models_folder) if models_folder is not None else DEFAULT_models_folder
         _models_folder.mkdir(parents=True, exist_ok=True)
 
-        super().__init__(BindingName)
-        self.models_folder= models_folder
-        self.model_name=model_name
+        super().__init__(BindingName, **kwargs)
+        self.models_folder= kwargs.get("models_folder")
+        self.model_name=kwargs.get("model_name")
         self.default_completion_format=default_completion_format
 
 
@@ -261,11 +265,11 @@ class VLLMBinding(LollmsLLMBinding):
         self.current_engine_params: Optional[Dict[str, Any]] = None
         self.vllm_engine_kwargs_config = kwargs.copy()
 
-        if model_name:
+        if self.model_name:
             try:
-                self.load_model(model_name)
+                self.load_model(self.model_name)
             except Exception as e:
-                ASCIIColors.error(f"Auto-load model '{model_name}' failed: {e}")
+                ASCIIColors.error(f"Auto-load model '{self.model_name}' failed: {e}")
                 trace_exception(e)
 
     def _get_vllm_engine_params_for_load(self) -> Dict[str, Any]: