lollms-client 1.6.1__py3-none-any.whl → 1.6.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of lollms-client has been flagged as potentially problematic; consult the registry's advisory page for more details.

Files changed (42)
  1. lollms_client/__init__.py +1 -1
  2. lollms_client/llm_bindings/azure_openai/__init__.py +2 -2
  3. lollms_client/llm_bindings/claude/__init__.py +2 -2
  4. lollms_client/llm_bindings/gemini/__init__.py +2 -2
  5. lollms_client/llm_bindings/grok/__init__.py +2 -2
  6. lollms_client/llm_bindings/groq/__init__.py +2 -2
  7. lollms_client/llm_bindings/hugging_face_inference_api/__init__.py +2 -2
  8. lollms_client/llm_bindings/litellm/__init__.py +1 -1
  9. lollms_client/llm_bindings/llamacpp/__init__.py +2 -2
  10. lollms_client/llm_bindings/lollms/__init__.py +1 -1
  11. lollms_client/llm_bindings/lollms_webui/__init__.py +1 -1
  12. lollms_client/llm_bindings/mistral/__init__.py +2 -2
  13. lollms_client/llm_bindings/novita_ai/__init__.py +2 -2
  14. lollms_client/llm_bindings/ollama/__init__.py +7 -4
  15. lollms_client/llm_bindings/open_router/__init__.py +2 -2
  16. lollms_client/llm_bindings/openai/__init__.py +1 -1
  17. lollms_client/llm_bindings/openllm/__init__.py +2 -2
  18. lollms_client/llm_bindings/openwebui/__init__.py +1 -1
  19. lollms_client/llm_bindings/perplexity/__init__.py +2 -2
  20. lollms_client/llm_bindings/pythonllamacpp/__init__.py +3 -3
  21. lollms_client/llm_bindings/tensor_rt/__init__.py +1 -1
  22. lollms_client/llm_bindings/transformers/__init__.py +4 -4
  23. lollms_client/llm_bindings/vllm/__init__.py +1 -1
  24. lollms_client/lollms_core.py +19 -1452
  25. lollms_client/lollms_llm_binding.py +1 -1
  26. lollms_client/lollms_tti_binding.py +1 -1
  27. lollms_client/lollms_tts_binding.py +15 -13
  28. lollms_client/tti_bindings/diffusers/__init__.py +276 -856
  29. lollms_client/tti_bindings/diffusers/server/main.py +730 -0
  30. lollms_client/tti_bindings/gemini/__init__.py +1 -1
  31. lollms_client/tti_bindings/leonardo_ai/__init__.py +1 -1
  32. lollms_client/tti_bindings/novita_ai/__init__.py +1 -1
  33. lollms_client/tti_bindings/stability_ai/__init__.py +1 -1
  34. lollms_client/tts_bindings/lollms/__init__.py +6 -1
  35. lollms_client/tts_bindings/piper_tts/__init__.py +1 -1
  36. lollms_client/tts_bindings/xtts/__init__.py +97 -38
  37. lollms_client/tts_bindings/xtts/server/main.py +288 -272
  38. {lollms_client-1.6.1.dist-info → lollms_client-1.6.4.dist-info}/METADATA +6 -3
  39. {lollms_client-1.6.1.dist-info → lollms_client-1.6.4.dist-info}/RECORD +42 -41
  40. {lollms_client-1.6.1.dist-info → lollms_client-1.6.4.dist-info}/WHEEL +0 -0
  41. {lollms_client-1.6.1.dist-info → lollms_client-1.6.4.dist-info}/licenses/LICENSE +0 -0
  42. {lollms_client-1.6.1.dist-info → lollms_client-1.6.4.dist-info}/top_level.txt +0 -0
lollms_client/__init__.py CHANGED
@@ -8,7 +8,7 @@ from lollms_client.lollms_utilities import PromptReshaper # Keep general utiliti
8
8
  from lollms_client.lollms_mcp_binding import LollmsMCPBinding, LollmsMCPBindingManager
9
9
  from lollms_client.lollms_llm_binding import LollmsLLMBindingManager
10
10
 
11
- __version__ = "1.6.1" # Updated version
11
+ __version__ = "1.6.4" # Updated version
12
12
 
13
13
  # Optionally, you could define __all__ if you want to be explicit about exports
14
14
  __all__ = [
@@ -238,7 +238,7 @@ class AzureOpenAIBinding(LollmsLLMBinding):
238
238
  "supports_vision": True, # Assume modern deployments support vision
239
239
  }
240
240
 
241
- def listModels(self) -> List[Dict[str, str]]:
241
+ def list_models(self) -> List[Dict[str, str]]:
242
242
  """
243
243
  List Models is not supported via the Azure OpenAI API.
244
244
  Deployments are managed in the Azure Portal. This method returns an empty list.
@@ -280,7 +280,7 @@ if __name__ == '__main__':
280
280
 
281
281
  # --- List Models ---
282
282
  ASCIIColors.cyan("\n--- Listing Models ---")
283
- models = binding.listModels()
283
+ models = binding.list_models()
284
284
  if not models:
285
285
  ASCIIColors.green("Correctly returned an empty list for models, as expected for Azure.")
286
286
 
@@ -354,7 +354,7 @@ class ClaudeBinding(LollmsLLMBinding):
354
354
  "supports_vision": "claude-3" in self.model_name,
355
355
  }
356
356
 
357
- def listModels(self) -> List[Dict[str, str]]:
357
+ def list_models(self) -> List[Dict[str, str]]:
358
358
  """
359
359
  Lists available models from the Anthropic API.
360
360
  Caches the result to avoid repeated API calls.
@@ -451,7 +451,7 @@ if __name__ == '__main__':
451
451
 
452
452
  # --- List Models ---
453
453
  ASCIIColors.cyan("\n--- Listing Models (dynamic) ---")
454
- models = binding.listModels()
454
+ models = binding.list_models()
455
455
  if models:
456
456
  ASCIIColors.green(f"Found {len(models)} models.")
457
457
  for m in models:
@@ -353,7 +353,7 @@ class GeminiBinding(LollmsLLMBinding):
353
353
  "supports_vision": "vision" in self.model_name or "gemini-1.5" in self.model_name,
354
354
  }
355
355
 
356
- def listModels(self) -> List[Dict[str, str]]:
356
+ def list_models(self) -> List[Dict[str, str]]:
357
357
  """Lists available generative models from the Gemini service."""
358
358
  if not self.client:
359
359
  ASCIIColors.error("Gemini client not initialized. Cannot list models.")
@@ -407,7 +407,7 @@ if __name__ == '__main__':
407
407
 
408
408
  # --- List Models ---
409
409
  ASCIIColors.cyan("\n--- Listing Models ---")
410
- models = binding.listModels()
410
+ models = binding.list_models()
411
411
  if models:
412
412
  ASCIIColors.green(f"Found {len(models)} generative models. First 5:")
413
413
  for m in models[:5]:
@@ -365,7 +365,7 @@ class GrokBinding(LollmsLLMBinding):
365
365
  "supports_vision": "vision" in self.model_name or "grok-1.5" == self.model_name,
366
366
  }
367
367
 
368
- def listModels(self) -> List[Dict[str, str]]:
368
+ def list_models(self) -> List[Dict[str, str]]:
369
369
  """
370
370
  Lists available models from the xAI API.
371
371
  Caches the result to avoid repeated API calls.
@@ -444,7 +444,7 @@ if __name__ == '__main__':
444
444
 
445
445
  # --- List Models ---
446
446
  ASCIIColors.cyan("\n--- Listing Models (dynamic) ---")
447
- models = binding.listModels()
447
+ models = binding.list_models()
448
448
  if models:
449
449
  ASCIIColors.green(f"Found {len(models)} models.")
450
450
  for m in models:
@@ -179,7 +179,7 @@ class GroqBinding(LollmsLLMBinding):
179
179
  "supports_vision": False, # Groq models do not currently support vision
180
180
  }
181
181
 
182
- def listModels(self) -> List[Dict[str, str]]:
182
+ def list_models(self) -> List[Dict[str, str]]:
183
183
  """Lists available models from the Groq service."""
184
184
  if not self.client:
185
185
  ASCIIColors.error("Groq client not initialized. Cannot list models.")
@@ -229,7 +229,7 @@ if __name__ == '__main__':
229
229
 
230
230
  # --- List Models ---
231
231
  ASCIIColors.cyan("\n--- Listing Models ---")
232
- models = binding.listModels()
232
+ models = binding.list_models()
233
233
  if models:
234
234
  ASCIIColors.green(f"Found {len(models)} models on Groq. Available models:")
235
235
  for m in models:
@@ -196,7 +196,7 @@ class HuggingFaceInferenceAPIBinding(LollmsLLMBinding):
196
196
  "supports_vision": False, # Vision models use a different API call
197
197
  }
198
198
 
199
- def listModels(self) -> List[Dict[str, str]]:
199
+ def list_models(self) -> List[Dict[str, str]]:
200
200
  """Lists text-generation models from the Hugging Face Hub."""
201
201
  if not self.hf_api:
202
202
  ASCIIColors.error("HF API client not initialized. Cannot list models.")
@@ -252,7 +252,7 @@ if __name__ == '__main__':
252
252
 
253
253
  # --- List Models ---
254
254
  ASCIIColors.cyan("\n--- Listing Models ---")
255
- models = binding.listModels()
255
+ models = binding.list_models()
256
256
  if models:
257
257
  ASCIIColors.green(f"Successfully fetched {len(models)} text-generation models.")
258
258
  ASCIIColors.info("Top 5 most downloaded models:")
@@ -185,7 +185,7 @@ class LiteLLMBinding(LollmsLLMBinding):
185
185
  ASCIIColors.error(f"--- [LiteLLM Binding] Fallback method failed: {e}")
186
186
  return entries
187
187
 
188
- def listModels(self) -> List[Dict]:
188
+ def list_models(self) -> List[Dict]:
189
189
  url = f'{self.host_address}/model/info'
190
190
  headers = {'Authorization': f'Bearer {self.service_key}'}
191
191
  entries = []
@@ -628,7 +628,7 @@ class LlamaCppServerBinding(LollmsLLMBinding):
628
628
 
629
629
  if not model_to_load:
630
630
  self._scan_models()
631
- available_models = self.listModels()
631
+ available_models = self.list_models()
632
632
  if not available_models:
633
633
  ASCIIColors.error("No model specified and no GGUF models found in models path.")
634
634
  return False
@@ -964,7 +964,7 @@ class LlamaCppServerBinding(LollmsLLMBinding):
964
964
 
965
965
  ASCIIColors.info(f"Scanned {len(self._model_path_map)} models from {self.models_path}.")
966
966
 
967
- def listModels(self) -> List[Dict[str, Any]]:
967
+ def list_models(self) -> List[Dict[str, Any]]:
968
968
  self._scan_models()
969
969
  models_found = []
970
970
  for unique_name, model_path in self._model_path_map.items():
@@ -564,7 +564,7 @@ class LollmsBinding(LollmsLLMBinding):
564
564
  "model_name": self.model_name
565
565
  }
566
566
 
567
- def listModels(self) -> List[Dict]:
567
+ def list_models(self) -> List[Dict]:
568
568
  # Known context lengths
569
569
  known_context_lengths = {
570
570
  "gpt-4o": 128000,
@@ -375,7 +375,7 @@ class LollmsWebuiLLMBinding(LollmsLLMBinding):
375
375
  }
376
376
 
377
377
 
378
- def listModels(self) -> dict:
378
+ def list_models(self) -> dict:
379
379
  """Lists models"""
380
380
  url = f"{self.host_address}/list_models"
381
381
 
@@ -224,7 +224,7 @@ class MistralBinding(LollmsLLMBinding):
224
224
  "supports_vision": False, # Mistral API does not currently support vision
225
225
  }
226
226
 
227
- def listModels(self) -> List[Dict[str, str]]:
227
+ def list_models(self) -> List[Dict[str, str]]:
228
228
  """Lists available models from the Mistral service."""
229
229
  if not self.client:
230
230
  ASCIIColors.error("Mistral client not initialized. Cannot list models.")
@@ -273,7 +273,7 @@ if __name__ == '__main__':
273
273
 
274
274
  # --- List Models ---
275
275
  ASCIIColors.cyan("\n--- Listing Models ---")
276
- models = binding.listModels()
276
+ models = binding.list_models()
277
277
  if models:
278
278
  ASCIIColors.green(f"Found {len(models)} models on Mistral. Available models:")
279
279
  for m in models:
@@ -211,7 +211,7 @@ class NovitaAIBinding(LollmsLLMBinding):
211
211
  "supports_vision": False
212
212
  }
213
213
 
214
- def listModels(self) -> List[Dict[str, str]]:
214
+ def list_models(self) -> List[Dict[str, str]]:
215
215
  """
216
216
  Lists available models. Novita AI API does not have a models endpoint,
217
217
  so a hardcoded list from their documentation is returned.
@@ -242,7 +242,7 @@ if __name__ == '__main__':
242
242
 
243
243
  # --- List Models ---
244
244
  ASCIIColors.cyan("\n--- Listing Models (static list) ---")
245
- models = binding.listModels()
245
+ models = binding.list_models()
246
246
  if models:
247
247
  ASCIIColors.green(f"Found {len(models)} models.")
248
248
  for m in models:
@@ -595,7 +595,7 @@ class OllamaBinding(LollmsLLMBinding):
595
595
  "supports_vision": True # Many Ollama models (e.g. llava, bakllava) support vision
596
596
  }
597
597
 
598
- def listModels(self) -> List[Dict[str, str]]:
598
+ def list_models(self) -> List[Dict[str, str]]:
599
599
  """
600
600
  Lists available models from the Ollama service using the ollama-python library.
601
601
  The returned list of dictionaries matches the format of the original template.
@@ -621,10 +621,10 @@ class OllamaBinding(LollmsLLMBinding):
621
621
  })
622
622
  return model_info_list
623
623
  except ollama.ResponseError as e:
624
- ASCIIColors.error(f"Ollama API listModels ResponseError: {e.error or 'Unknown error'} (status code: {e.status_code}) from {self.host_address}")
624
+ ASCIIColors.error(f"Ollama API list_models ResponseError: {e.error or 'Unknown error'} (status code: {e.status_code}) from {self.host_address}")
625
625
  return []
626
626
  except ollama.RequestError as e: # Covers connection errors, timeouts during request
627
- ASCIIColors.error(f"Ollama API listModels RequestError: {str(e)} from {self.host_address}")
627
+ ASCIIColors.error(f"Ollama API list_models RequestError: {str(e)} from {self.host_address}")
628
628
  return []
629
629
  except Exception as ex:
630
630
  trace_exception(ex)
@@ -658,6 +658,9 @@ class OllamaBinding(LollmsLLMBinding):
658
658
  """
659
659
  if model_name is None:
660
660
  model_name = self.model_name
661
+ if not model_name:
662
+ ASCIIColors.warning("Model name not specified and no default model set.")
663
+ return None
661
664
 
662
665
  try:
663
666
  info = ollama.show(model_name)
@@ -813,7 +816,7 @@ if __name__ == '__main__':
813
816
 
814
817
  # --- List Models ---
815
818
  ASCIIColors.cyan("\n--- Listing Models ---")
816
- models = binding.listModels()
819
+ models = binding.list_models()
817
820
  if models:
818
821
  ASCIIColors.green(f"Found {len(models)} models. First 5:")
819
822
  for m in models[:5]:
@@ -227,7 +227,7 @@ class OpenRouterBinding(LollmsLLMBinding):
227
227
  "supports_vision": "Depends on the specific model selected. This generic binding does not support vision.",
228
228
  }
229
229
 
230
- def listModels(self) -> List[Dict[str, str]]:
230
+ def list_models(self) -> List[Dict[str, str]]:
231
231
  """Lists available models from the OpenRouter service."""
232
232
  if not self.client:
233
233
  ASCIIColors.error("OpenRouter client not initialized. Cannot list models.")
@@ -274,7 +274,7 @@ if __name__ == '__main__':
274
274
 
275
275
  # --- List Models ---
276
276
  ASCIIColors.cyan("\n--- Listing Models ---")
277
- models = binding.listModels()
277
+ models = binding.list_models()
278
278
  if models:
279
279
  ASCIIColors.green(f"Successfully fetched {len(models)} models from OpenRouter.")
280
280
  ASCIIColors.info("Sample of available models:")
@@ -648,7 +648,7 @@ class OpenAIBinding(LollmsLLMBinding):
648
648
  "model_name": self.model_name
649
649
  }
650
650
 
651
- def listModels(self) -> List[Dict]:
651
+ def list_models(self) -> List[Dict]:
652
652
  # Known context lengths
653
653
  known_context_lengths = {
654
654
  "gpt-4o": 128000,
@@ -344,7 +344,7 @@ class OpenLLMBinding(LollmsLLMBinding):
344
344
  "supports_vision": supports_vision # Highly dependent on the specific model served
345
345
  }
346
346
 
347
- def listModels(self) -> List[Dict[str, str]]:
347
+ def list_models(self) -> List[Dict[str, str]]:
348
348
  """
349
349
  Lists the model currently served by the connected OpenLLM instance.
350
350
  OpenLLM client connects to one model server at a time.
@@ -423,7 +423,7 @@ if __name__ == '__main__':
423
423
  ASCIIColors.info(f"Using OpenLLM client version: {openllm.__version__ if openllm else 'N/A'}")
424
424
 
425
425
  ASCIIColors.cyan("\n--- Listing Model (should be the one connected) ---")
426
- models = binding.listModels()
426
+ models = binding.list_models()
427
427
  if models:
428
428
  ASCIIColors.green(f"Connected model info:")
429
429
  for m in models:
@@ -232,7 +232,7 @@ class OpenWebUIBinding(LollmsLLMBinding):
232
232
 
233
233
  return output
234
234
 
235
- def listModels(self) -> List[Dict]:
235
+ def list_models(self) -> List[Dict]:
236
236
  models_info = []
237
237
  try:
238
238
  response = self.client.get("/api/models")
@@ -224,7 +224,7 @@ class PerplexityBinding(LollmsLLMBinding):
224
224
  "supports_structured_output": False
225
225
  }
226
226
 
227
- def listModels(self) -> List[Dict[str, str]]:
227
+ def list_models(self) -> List[Dict[str, str]]:
228
228
  """
229
229
  Lists available models. Perplexity API does not have a models endpoint,
230
230
  so a hardcoded list is returned.
@@ -255,7 +255,7 @@ if __name__ == '__main__':
255
255
 
256
256
  # --- List Models ---
257
257
  ASCIIColors.cyan("\n--- Listing Models (static list) ---")
258
- models = binding.listModels()
258
+ models = binding.list_models()
259
259
  if models:
260
260
  ASCIIColors.green(f"Found {len(models)} models.")
261
261
  for m in models:
@@ -422,7 +422,7 @@ class PythonLlamaCppBinding(LollmsLLMBinding):
422
422
  "config": self.llama_config
423
423
  }
424
424
 
425
- def listModels(self, force_rescan: bool = False) -> List[Dict[str, str]]: # type: ignore
425
+ def list_models(self, force_rescan: bool = False) -> List[Dict[str, str]]: # type: ignore
426
426
  """
427
427
  Lists available GGUF models.
428
428
 
@@ -528,10 +528,10 @@ if __name__ == '__main__':
528
528
 
529
529
  # --- List Models ---
530
530
  ASCIIColors.cyan("\n--- Listing Models (force_rescan=True) ---")
531
- model_list = active_binding.listModels(force_rescan=True)
531
+ model_list = active_binding.list_models(force_rescan=True)
532
532
  print(json.dumps(model_list, indent=2))
533
533
  assert len(model_list) == 2, "Model discovery failed to find all dummy models."
534
- assert any(m['loaded'] for m in model_list), "listModels did not correctly report a loaded model."
534
+ assert any(m['loaded'] for m in model_list), "list_models did not correctly report a loaded model."
535
535
 
536
536
 
537
537
  if is_dummy_model:
@@ -442,7 +442,7 @@ class VLLMBinding(LollmsLLMBinding):
442
442
  }
443
443
  return info
444
444
 
445
- def listModels(self) -> List[Dict[str, Any]]:
445
+ def list_models(self) -> List[Dict[str, Any]]:
446
446
  local_models = []
447
447
  if not self.models_folder.exists(): return []
448
448
  for item_path in self.models_folder.rglob('*'):
@@ -492,7 +492,7 @@ class HuggingFaceHubBinding(LollmsLLMBinding):
492
492
  # (unless using external libraries like outlines)
493
493
  return info
494
494
 
495
- def listModels(self) -> List[Dict[str, str]]:
495
+ def list_models(self) -> List[Dict[str, str]]:
496
496
  models_found = []
497
497
  unique_model_names = set()
498
498
 
@@ -549,8 +549,8 @@ if __name__ == '__main__':
549
549
  # Let's use a dummy path for models_path for Hub ID testing.
550
550
 
551
551
  # Adjust current_directory for local model testing if needed
552
- # For this test, we'll assume a Hub ID. `models_path` is where `listModels` would scan.
553
- test_models_parent_path = Path("./test_hf_models_dir") # Create a dummy for listModels scan
552
+ # For this test, we'll assume a Hub ID. `models_path` is where `list_models` would scan.
553
+ test_models_parent_path = Path("./test_hf_models_dir") # Create a dummy for list_models scan
554
554
  test_models_parent_path.mkdir(exist_ok=True)
555
555
 
556
556
  binding_config = {
@@ -582,7 +582,7 @@ if __name__ == '__main__':
582
582
  # To make this test useful, you could manually place a model folder in `test_hf_models_dir`
583
583
  # e.g., download "gpt2" and put it in `test_hf_models_dir/gpt2`
584
584
  # For now, it will likely be empty unless you do that.
585
- listed_models = active_binding.listModels()
585
+ listed_models = active_binding.list_models()
586
586
  if listed_models:
587
587
  ASCIIColors.green(f"Found {len(listed_models)} potential model folders. First 5:")
588
588
  for m in listed_models[:5]: print(m)
@@ -451,7 +451,7 @@ class VLLMBinding(LollmsLLMBinding):
451
451
  }
452
452
  return info
453
453
 
454
- def listModels(self) -> List[Dict[str, Any]]:
454
+ def list_models(self) -> List[Dict[str, Any]]:
455
455
  local_models = []
456
456
  if not self.models_folder.exists(): return []
457
457
  for item_path in self.models_folder.rglob('*'):