lollms-client 1.5.6__py3-none-any.whl → 1.7.13__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (63)
  1. lollms_client/__init__.py +1 -1
  2. lollms_client/llm_bindings/azure_openai/__init__.py +2 -2
  3. lollms_client/llm_bindings/claude/__init__.py +125 -35
  4. lollms_client/llm_bindings/gemini/__init__.py +261 -159
  5. lollms_client/llm_bindings/grok/__init__.py +52 -15
  6. lollms_client/llm_bindings/groq/__init__.py +2 -2
  7. lollms_client/llm_bindings/hugging_face_inference_api/__init__.py +2 -2
  8. lollms_client/llm_bindings/litellm/__init__.py +1 -1
  9. lollms_client/llm_bindings/llama_cpp_server/__init__.py +605 -0
  10. lollms_client/llm_bindings/llamacpp/__init__.py +18 -11
  11. lollms_client/llm_bindings/lollms/__init__.py +76 -21
  12. lollms_client/llm_bindings/lollms_webui/__init__.py +1 -1
  13. lollms_client/llm_bindings/mistral/__init__.py +2 -2
  14. lollms_client/llm_bindings/novita_ai/__init__.py +142 -6
  15. lollms_client/llm_bindings/ollama/__init__.py +345 -89
  16. lollms_client/llm_bindings/open_router/__init__.py +2 -2
  17. lollms_client/llm_bindings/openai/__init__.py +81 -20
  18. lollms_client/llm_bindings/openllm/__init__.py +362 -506
  19. lollms_client/llm_bindings/openwebui/__init__.py +333 -171
  20. lollms_client/llm_bindings/perplexity/__init__.py +2 -2
  21. lollms_client/llm_bindings/pythonllamacpp/__init__.py +3 -3
  22. lollms_client/llm_bindings/tensor_rt/__init__.py +1 -1
  23. lollms_client/llm_bindings/transformers/__init__.py +428 -632
  24. lollms_client/llm_bindings/vllm/__init__.py +1 -1
  25. lollms_client/lollms_agentic.py +4 -2
  26. lollms_client/lollms_base_binding.py +61 -0
  27. lollms_client/lollms_core.py +512 -1890
  28. lollms_client/lollms_discussion.py +65 -39
  29. lollms_client/lollms_llm_binding.py +126 -261
  30. lollms_client/lollms_mcp_binding.py +49 -77
  31. lollms_client/lollms_stt_binding.py +99 -52
  32. lollms_client/lollms_tti_binding.py +38 -38
  33. lollms_client/lollms_ttm_binding.py +38 -42
  34. lollms_client/lollms_tts_binding.py +43 -18
  35. lollms_client/lollms_ttv_binding.py +38 -42
  36. lollms_client/lollms_types.py +4 -2
  37. lollms_client/stt_bindings/whisper/__init__.py +108 -23
  38. lollms_client/stt_bindings/whispercpp/__init__.py +7 -1
  39. lollms_client/tti_bindings/diffusers/__init__.py +464 -803
  40. lollms_client/tti_bindings/diffusers/server/main.py +1062 -0
  41. lollms_client/tti_bindings/gemini/__init__.py +182 -239
  42. lollms_client/tti_bindings/leonardo_ai/__init__.py +6 -3
  43. lollms_client/tti_bindings/lollms/__init__.py +4 -1
  44. lollms_client/tti_bindings/novita_ai/__init__.py +5 -2
  45. lollms_client/tti_bindings/openai/__init__.py +10 -11
  46. lollms_client/tti_bindings/stability_ai/__init__.py +5 -3
  47. lollms_client/ttm_bindings/audiocraft/__init__.py +7 -12
  48. lollms_client/ttm_bindings/beatoven_ai/__init__.py +7 -3
  49. lollms_client/ttm_bindings/lollms/__init__.py +4 -17
  50. lollms_client/ttm_bindings/replicate/__init__.py +7 -4
  51. lollms_client/ttm_bindings/stability_ai/__init__.py +7 -4
  52. lollms_client/ttm_bindings/topmediai/__init__.py +6 -3
  53. lollms_client/tts_bindings/bark/__init__.py +7 -10
  54. lollms_client/tts_bindings/lollms/__init__.py +6 -1
  55. lollms_client/tts_bindings/piper_tts/__init__.py +8 -11
  56. lollms_client/tts_bindings/xtts/__init__.py +157 -74
  57. lollms_client/tts_bindings/xtts/server/main.py +241 -280
  58. {lollms_client-1.5.6.dist-info → lollms_client-1.7.13.dist-info}/METADATA +113 -5
  59. lollms_client-1.7.13.dist-info/RECORD +90 -0
  60. lollms_client-1.5.6.dist-info/RECORD +0 -87
  61. {lollms_client-1.5.6.dist-info → lollms_client-1.7.13.dist-info}/WHEEL +0 -0
  62. {lollms_client-1.5.6.dist-info → lollms_client-1.7.13.dist-info}/licenses/LICENSE +0 -0
  63. {lollms_client-1.5.6.dist-info → lollms_client-1.7.13.dist-info}/top_level.txt +0 -0
@@ -227,7 +227,7 @@ class OpenRouterBinding(LollmsLLMBinding):
227
227
  "supports_vision": "Depends on the specific model selected. This generic binding does not support vision.",
228
228
  }
229
229
 
230
- def listModels(self) -> List[Dict[str, str]]:
230
+ def list_models(self) -> List[Dict[str, str]]:
231
231
  """Lists available models from the OpenRouter service."""
232
232
  if not self.client:
233
233
  ASCIIColors.error("OpenRouter client not initialized. Cannot list models.")
@@ -274,7 +274,7 @@ if __name__ == '__main__':
274
274
 
275
275
  # --- List Models ---
276
276
  ASCIIColors.cyan("\n--- Listing Models ---")
277
- models = binding.listModels()
277
+ models = binding.list_models()
278
278
  if models:
279
279
  ASCIIColors.green(f"Successfully fetched {len(models)} models from OpenRouter.")
280
280
  ASCIIColors.info("Sample of available models:")
@@ -18,7 +18,7 @@ pm.ensure_packages(["openai","tiktoken"])
18
18
  import openai
19
19
  import tiktoken
20
20
  import os
21
-
21
+ import base64
22
22
  BindingName = "OpenAIBinding"
23
23
 
24
24
 
@@ -300,10 +300,66 @@ class OpenAIBinding(LollmsLLMBinding):
300
300
  streaming_callback: Optional[Callable[[str, MSG_TYPE], None]] = None,
301
301
  **kwargs
302
302
  ) -> Union[str, dict]:
303
- # Build the request parameters
303
+
304
+ # --- Standardize Messages for OpenAI ---
305
+ def normalize_message(msg: Dict) -> Dict:
306
+ role = msg.get("role", "user")
307
+ content = msg.get("content", "")
308
+ text_parts = []
309
+ images = []
310
+
311
+ # 1. Extract Text and Images from input
312
+ if isinstance(content, str):
313
+ text_parts.append(content)
314
+ elif isinstance(content, list):
315
+ for item in content:
316
+ if item.get("type") == "text":
317
+ text_parts.append(item.get("text", ""))
318
+ elif item.get("type") in ["input_image", "image_url"]:
319
+ # Handle various internal representations of images
320
+ val = item.get("image_url")
321
+ if isinstance(val, dict):
322
+ # Handle dicts like {"url": "..."} or {"base64": "..."}
323
+ val = val.get("url") or val.get("base64")
324
+
325
+ if isinstance(val, str) and val:
326
+ images.append(val)
327
+
328
+ text_content = "\n".join([p for p in text_parts if p.strip()])
329
+
330
+ # 2. Format for OpenAI API
331
+ if not images:
332
+ # Simple text-only message
333
+ return {"role": role, "content": text_content}
334
+ else:
335
+ # Multimodal message
336
+ openai_content = []
337
+ if text_content:
338
+ openai_content.append({"type": "text", "text": text_content})
339
+
340
+ for img in images:
341
+ # OpenAI STRICTLY requires the data URI prefix for base64
342
+ # or a valid http/https URL.
343
+ img_url = img
344
+ if not img.startswith("http"):
345
+ if not img.startswith("data:"):
346
+ # If raw base64 is passed without header, add default jpeg header
347
+ img_url = f"data:image/jpeg;base64,{img}"
348
+
349
+ openai_content.append({
350
+ "type": "image_url",
351
+ "image_url": {"url": img_url}
352
+ })
353
+
354
+ return {"role": role, "content": openai_content}
355
+
356
+ # Process and clean the list
357
+ openai_messages = [normalize_message(m) for m in messages]
358
+
359
+ # --- Build Request ---
304
360
  params = {
305
361
  "model": self.model_name,
306
- "messages": messages,
362
+ "messages": openai_messages, # Use the standardized list
307
363
  "max_tokens": n_predict,
308
364
  "n": 1,
309
365
  "temperature": temperature,
@@ -311,34 +367,39 @@ class OpenAIBinding(LollmsLLMBinding):
311
367
  "frequency_penalty": repeat_penalty,
312
368
  "stream": stream
313
369
  }
314
- # Add seed if available, as it's supported by newer OpenAI models
370
+
371
+ # Add seed if available
315
372
  if seed is not None:
316
373
  params["seed"] = seed
317
374
 
318
- # Remove None values, as the API expects them to be absent
375
+ # Remove None values
319
376
  params = {k: v for k, v in params.items() if v is not None}
320
377
 
321
378
  output = ""
322
- # 2. Call the API
379
+
380
+ # --- Call API ---
323
381
  try:
324
382
  try:
325
383
  completion = self.client.chat.completions.create(**params)
326
384
  except Exception as ex:
327
- # exception for new openai models
328
- params["max_completion_tokens"]=params["max_tokens"]
329
- params["temperature"]=1
330
- try: del params["max_tokens"]
331
- except Exception: pass
332
- try: del params["top_p"]
333
- except Exception: pass
334
- try: del params["frequency_penalty"]
335
- except Exception: pass
336
-
385
+ # Fallback/Handling for 'reasoning' models (o1, o3-mini)
386
+ # which don't support max_tokens, temperature, etc.
387
+ if "max_tokens" in params:
388
+ params["max_completion_tokens"] = params["max_tokens"]
389
+ del params["max_tokens"]
337
390
 
391
+ # Set temperature to 1 (required for some o-series models) or remove it
392
+ params["temperature"] = 1
393
+
394
+ keys_to_remove = ["top_p", "frequency_penalty", "presence_penalty"]
395
+ for k in keys_to_remove:
396
+ if k in params:
397
+ del params[k]
398
+
338
399
  completion = self.client.chat.completions.create(**params)
400
+
339
401
  if stream:
340
402
  for chunk in completion:
341
- # The streaming response for chat has a different structure
342
403
  delta = chunk.choices[0].delta
343
404
  if delta.content:
344
405
  word = delta.content
@@ -350,7 +411,6 @@ class OpenAIBinding(LollmsLLMBinding):
350
411
  output = completion.choices[0].message.content
351
412
 
352
413
  except Exception as e:
353
- # Handle API errors gracefully
354
414
  error_message = f"An error occurred with the OpenAI API: {e}"
355
415
  if streaming_callback:
356
416
  streaming_callback(error_message, MSG_TYPE.MSG_TYPE_EXCEPTION)
@@ -371,7 +431,8 @@ class OpenAIBinding(LollmsLLMBinding):
371
431
  seed: Optional[int] = None,
372
432
  n_threads: Optional[int] = None,
373
433
  ctx_size: Optional[int] = None,
374
- streaming_callback: Optional[Callable[[str, MSG_TYPE], None]] = None
434
+ streaming_callback: Optional[Callable[[str, MSG_TYPE], None]] = None,
435
+ **kwargs
375
436
  ) -> Union[str, dict]:
376
437
 
377
438
  messages = discussion.export("openai_chat", branch_tip_id)
@@ -648,7 +709,7 @@ class OpenAIBinding(LollmsLLMBinding):
648
709
  "model_name": self.model_name
649
710
  }
650
711
 
651
- def listModels(self) -> List[Dict]:
712
+ def list_models(self) -> List[Dict]:
652
713
  # Known context lengths
653
714
  known_context_lengths = {
654
715
  "gpt-4o": 128000,