lollms-client 0.33.0__py3-none-any.whl → 1.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of lollms-client might be problematic.

Files changed (74)
  1. lollms_client/__init__.py +1 -1
  2. lollms_client/llm_bindings/azure_openai/__init__.py +6 -10
  3. lollms_client/llm_bindings/claude/__init__.py +4 -7
  4. lollms_client/llm_bindings/gemini/__init__.py +3 -7
  5. lollms_client/llm_bindings/grok/__init__.py +3 -7
  6. lollms_client/llm_bindings/groq/__init__.py +4 -6
  7. lollms_client/llm_bindings/hugging_face_inference_api/__init__.py +4 -6
  8. lollms_client/llm_bindings/litellm/__init__.py +15 -6
  9. lollms_client/llm_bindings/llamacpp/__init__.py +27 -9
  10. lollms_client/llm_bindings/lollms/__init__.py +24 -14
  11. lollms_client/llm_bindings/lollms_webui/__init__.py +6 -12
  12. lollms_client/llm_bindings/mistral/__init__.py +3 -5
  13. lollms_client/llm_bindings/ollama/__init__.py +6 -11
  14. lollms_client/llm_bindings/open_router/__init__.py +4 -6
  15. lollms_client/llm_bindings/openai/__init__.py +7 -14
  16. lollms_client/llm_bindings/openllm/__init__.py +12 -12
  17. lollms_client/llm_bindings/pythonllamacpp/__init__.py +1 -1
  18. lollms_client/llm_bindings/tensor_rt/__init__.py +8 -13
  19. lollms_client/llm_bindings/transformers/__init__.py +14 -6
  20. lollms_client/llm_bindings/vllm/__init__.py +16 -12
  21. lollms_client/lollms_core.py +303 -490
  22. lollms_client/lollms_discussion.py +431 -78
  23. lollms_client/lollms_llm_binding.py +192 -381
  24. lollms_client/lollms_mcp_binding.py +33 -2
  25. lollms_client/lollms_tti_binding.py +107 -2
  26. lollms_client/mcp_bindings/local_mcp/__init__.py +3 -2
  27. lollms_client/mcp_bindings/remote_mcp/__init__.py +6 -5
  28. lollms_client/mcp_bindings/standard_mcp/__init__.py +3 -5
  29. lollms_client/stt_bindings/lollms/__init__.py +6 -8
  30. lollms_client/stt_bindings/whisper/__init__.py +2 -4
  31. lollms_client/stt_bindings/whispercpp/__init__.py +15 -16
  32. lollms_client/tti_bindings/dalle/__init__.py +50 -29
  33. lollms_client/tti_bindings/diffusers/__init__.py +227 -439
  34. lollms_client/tti_bindings/gemini/__init__.py +320 -0
  35. lollms_client/tti_bindings/lollms/__init__.py +8 -9
  36. lollms_client-1.1.0.dist-info/METADATA +1214 -0
  37. lollms_client-1.1.0.dist-info/RECORD +69 -0
  38. {lollms_client-0.33.0.dist-info → lollms_client-1.1.0.dist-info}/top_level.txt +0 -2
  39. examples/article_summary/article_summary.py +0 -58
  40. examples/console_discussion/console_app.py +0 -266
  41. examples/console_discussion.py +0 -448
  42. examples/deep_analyze/deep_analyse.py +0 -30
  43. examples/deep_analyze/deep_analyze_multiple_files.py +0 -32
  44. examples/function_calling_with_local_custom_mcp.py +0 -250
  45. examples/generate_a_benchmark_for_safe_store.py +0 -89
  46. examples/generate_and_speak/generate_and_speak.py +0 -251
  47. examples/generate_game_sfx/generate_game_fx.py +0 -240
  48. examples/generate_text_with_multihop_rag_example.py +0 -210
  49. examples/gradio_chat_app.py +0 -228
  50. examples/gradio_lollms_chat.py +0 -259
  51. examples/internet_search_with_rag.py +0 -226
  52. examples/lollms_chat/calculator.py +0 -59
  53. examples/lollms_chat/derivative.py +0 -48
  54. examples/lollms_chat/test_openai_compatible_with_lollms_chat.py +0 -12
  55. examples/lollms_discussions_test.py +0 -155
  56. examples/mcp_examples/external_mcp.py +0 -267
  57. examples/mcp_examples/local_mcp.py +0 -171
  58. examples/mcp_examples/openai_mcp.py +0 -203
  59. examples/mcp_examples/run_remote_mcp_example_v2.py +0 -290
  60. examples/mcp_examples/run_standard_mcp_example.py +0 -204
  61. examples/simple_text_gen_test.py +0 -173
  62. examples/simple_text_gen_with_image_test.py +0 -178
  63. examples/test_local_models/local_chat.py +0 -9
  64. examples/text_2_audio.py +0 -77
  65. examples/text_2_image.py +0 -144
  66. examples/text_2_image_diffusers.py +0 -274
  67. examples/text_and_image_2_audio.py +0 -59
  68. examples/text_gen.py +0 -30
  69. examples/text_gen_system_prompt.py +0 -29
  70. lollms_client-0.33.0.dist-info/METADATA +0 -854
  71. lollms_client-0.33.0.dist-info/RECORD +0 -101
  72. test/test_lollms_discussion.py +0 -368
  73. {lollms_client-0.33.0.dist-info → lollms_client-1.1.0.dist-info}/WHEEL +0 -0
  74. {lollms_client-0.33.0.dist-info → lollms_client-1.1.0.dist-info}/licenses/LICENSE +0 -0
@@ -232,24 +232,28 @@ def resolve_hf_model_path(model_id_or_gguf_id: str, models_base_path: Path) -> P
 # --- VLLM Binding Class ---
 class VLLMBinding(LollmsLLMBinding):
     def __init__(self,
-                 models_folder: Optional[Union[str, Path]] = None,
-                 model_name: str = "",
-                 service_key: Optional[str] = None,
-                 verify_ssl_certificate: bool = True,
-                 default_completion_format: ELF_COMPLETION_FORMAT = ELF_COMPLETION_FORMAT.Chat,
                  **kwargs
                  ):
+        """Initializes the VLLM binding with the specified model and configuration.
+        Args:
+            models_folder (Optional[Union[str, Path]]): Path to the directory containing local models.
+            model_name (str): Name or ID of the model to load. Can be a Hugging Face Hub ID or local folder name.
+            service_key (Optional[str]): Service key for authentication (not used in this binding).
+            verify_ssl_certificate (bool): Whether to verify SSL certificates (not used in this binding).
+            default_completion_format (ELF_COMPLETION_FORMAT): Default format for text generation.
+        """
         if not _vllm_deps_installed:
             raise ImportError(f"vLLM or its dependencies not installed. Binding unusable. Error: {_vllm_installation_error}")
         if engine_manager is None:
             raise RuntimeError("VLLMEngineManager failed to initialize. Binding unusable.")
-
+        models_folder = kwargs.get("models_folder")
+        default_completion_format = kwargs.get("default_completion_format", ELF_COMPLETION_FORMAT.Chat)
         _models_folder = Path(models_folder) if models_folder is not None else DEFAULT_models_folder
         _models_folder.mkdir(parents=True, exist_ok=True)

-        super().__init__(BindingName)
-        self.models_folder= models_folder
-        self.model_name=model_name
+        super().__init__(BindingName, **kwargs)
+        self.models_folder= kwargs.get("models_folder")
+        self.model_name=kwargs.get("model_name")
         self.default_completion_format=default_completion_format


@@ -261,11 +265,11 @@ class VLLMBinding(LollmsLLMBinding):
         self.current_engine_params: Optional[Dict[str, Any]] = None
         self.vllm_engine_kwargs_config = kwargs.copy()

-        if model_name:
+        if self.model_name:
             try:
-                self.load_model(model_name)
+                self.load_model(self.model_name)
             except Exception as e:
-                ASCIIColors.error(f"Auto-load model '{model_name}' failed: {e}")
+                ASCIIColors.error(f"Auto-load model '{self.model_name}' failed: {e}")
                 trace_exception(e)

     def _get_vllm_engine_params_for_load(self) -> Dict[str, Any]:
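
The net effect of this hunk is that VLLMBinding.__init__ no longer declares models_folder, model_name, service_key, verify_ssl_certificate, or default_completion_format as explicit parameters; it now reads them from **kwargs, forwards the whole dictionary to LollmsLLMBinding via super().__init__(BindingName, **kwargs), and keeps a copy in vllm_engine_kwargs_config. The sketch below shows how a caller might construct the binding against the 1.1.0 signature; the model id and the extra engine keyword are placeholders, and the exact set of engine kwargs the binding accepts is an assumption not visible in this diff.

    from pathlib import Path

    # Module path taken from the "Files changed" list above
    # (lollms_client/llm_bindings/vllm/__init__.py).
    from lollms_client.llm_bindings.vllm import VLLMBinding

    # All configuration is now passed as keyword arguments; __init__ pulls
    # models_folder, model_name and default_completion_format out of **kwargs
    # and keeps the remaining keys in vllm_engine_kwargs_config.
    binding = VLLMBinding(
        models_folder=Path.home() / "models",   # read via kwargs.get("models_folder")
        model_name="org/some-model",            # placeholder id; a non-empty value triggers auto-load in __init__
        gpu_memory_utilization=0.9,             # hypothetical extra vLLM engine kwarg, retained via kwargs.copy()
    )

Judging by the similarly small, uniform diffs across the other llm_bindings modules in the file list, the same kwargs-based constructor pattern appears to have been applied to them as well.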