lollms-client 1.4.1__py3-none-any.whl → 1.7.10__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (64)
  1. lollms_client/__init__.py +1 -1
  2. lollms_client/llm_bindings/azure_openai/__init__.py +2 -2
  3. lollms_client/llm_bindings/claude/__init__.py +125 -34
  4. lollms_client/llm_bindings/gemini/__init__.py +261 -159
  5. lollms_client/llm_bindings/grok/__init__.py +52 -14
  6. lollms_client/llm_bindings/groq/__init__.py +2 -2
  7. lollms_client/llm_bindings/hugging_face_inference_api/__init__.py +2 -2
  8. lollms_client/llm_bindings/litellm/__init__.py +1 -1
  9. lollms_client/llm_bindings/llamacpp/__init__.py +18 -11
  10. lollms_client/llm_bindings/lollms/__init__.py +151 -32
  11. lollms_client/llm_bindings/lollms_webui/__init__.py +1 -1
  12. lollms_client/llm_bindings/mistral/__init__.py +2 -2
  13. lollms_client/llm_bindings/novita_ai/__init__.py +439 -0
  14. lollms_client/llm_bindings/ollama/__init__.py +309 -93
  15. lollms_client/llm_bindings/open_router/__init__.py +2 -2
  16. lollms_client/llm_bindings/openai/__init__.py +148 -29
  17. lollms_client/llm_bindings/openllm/__init__.py +362 -506
  18. lollms_client/llm_bindings/openwebui/__init__.py +465 -0
  19. lollms_client/llm_bindings/perplexity/__init__.py +326 -0
  20. lollms_client/llm_bindings/pythonllamacpp/__init__.py +3 -3
  21. lollms_client/llm_bindings/tensor_rt/__init__.py +1 -1
  22. lollms_client/llm_bindings/transformers/__init__.py +428 -632
  23. lollms_client/llm_bindings/vllm/__init__.py +1 -1
  24. lollms_client/lollms_agentic.py +4 -2
  25. lollms_client/lollms_base_binding.py +61 -0
  26. lollms_client/lollms_core.py +516 -1890
  27. lollms_client/lollms_discussion.py +55 -18
  28. lollms_client/lollms_llm_binding.py +112 -261
  29. lollms_client/lollms_mcp_binding.py +34 -75
  30. lollms_client/lollms_personality.py +5 -2
  31. lollms_client/lollms_stt_binding.py +85 -52
  32. lollms_client/lollms_tti_binding.py +23 -37
  33. lollms_client/lollms_ttm_binding.py +24 -42
  34. lollms_client/lollms_tts_binding.py +28 -17
  35. lollms_client/lollms_ttv_binding.py +24 -42
  36. lollms_client/lollms_types.py +4 -2
  37. lollms_client/stt_bindings/whisper/__init__.py +108 -23
  38. lollms_client/stt_bindings/whispercpp/__init__.py +7 -1
  39. lollms_client/tti_bindings/diffusers/__init__.py +418 -810
  40. lollms_client/tti_bindings/diffusers/server/main.py +1051 -0
  41. lollms_client/tti_bindings/gemini/__init__.py +182 -239
  42. lollms_client/tti_bindings/leonardo_ai/__init__.py +127 -0
  43. lollms_client/tti_bindings/lollms/__init__.py +4 -1
  44. lollms_client/tti_bindings/novita_ai/__init__.py +105 -0
  45. lollms_client/tti_bindings/openai/__init__.py +10 -11
  46. lollms_client/tti_bindings/stability_ai/__init__.py +178 -0
  47. lollms_client/ttm_bindings/audiocraft/__init__.py +7 -12
  48. lollms_client/ttm_bindings/beatoven_ai/__init__.py +129 -0
  49. lollms_client/ttm_bindings/lollms/__init__.py +4 -17
  50. lollms_client/ttm_bindings/replicate/__init__.py +115 -0
  51. lollms_client/ttm_bindings/stability_ai/__init__.py +117 -0
  52. lollms_client/ttm_bindings/topmediai/__init__.py +96 -0
  53. lollms_client/tts_bindings/bark/__init__.py +7 -10
  54. lollms_client/tts_bindings/lollms/__init__.py +6 -1
  55. lollms_client/tts_bindings/piper_tts/__init__.py +8 -11
  56. lollms_client/tts_bindings/xtts/__init__.py +157 -74
  57. lollms_client/tts_bindings/xtts/server/main.py +241 -280
  58. {lollms_client-1.4.1.dist-info → lollms_client-1.7.10.dist-info}/METADATA +316 -6
  59. lollms_client-1.7.10.dist-info/RECORD +89 -0
  60. lollms_client/ttm_bindings/bark/__init__.py +0 -339
  61. lollms_client-1.4.1.dist-info/RECORD +0 -78
  62. {lollms_client-1.4.1.dist-info → lollms_client-1.7.10.dist-info}/WHEEL +0 -0
  63. {lollms_client-1.4.1.dist-info → lollms_client-1.7.10.dist-info}/licenses/LICENSE +0 -0
  64. {lollms_client-1.4.1.dist-info → lollms_client-1.7.10.dist-info}/top_level.txt +0 -0
lollms_client/lollms_discussion.py
@@ -1,3 +1,6 @@
+#lollms_client/lollms_discussion.py
+#author : ParisNeo
+
 import base64
 import json
 import re
@@ -214,6 +217,11 @@ class LollmsDataManager:
         self.SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=self.engine)
         self.create_and_migrate_tables()

+    def discussion_exists(self, discussion_id: str) -> bool:
+        """Checks if a discussion with the given ID exists in the database."""
+        with self.get_session() as session:
+            return session.query(self.DiscussionModel).filter_by(id=discussion_id).first() is not None
+
     @staticmethod
     def new_message(**kwargs) -> 'SimpleNamespace':
         """A static factory method to create a new message data object.
@@ -851,10 +859,6 @@ class LollmsDiscussion:
                 ASCIIColors.success(f"Active branch ID for discussion {self.id} updated to: {new_active_leaf_id} (deepest leaf descendant).")
             elif new_active_leaf_id is None: # Should not happen if current_active_id exists
                 ASCIIColors.warning(f"Could not find a deeper leaf from '{current_active_id}'. Keeping current ID.")
-            else:
-                ASCIIColors.info(f"Active branch ID '{current_active_id}' is already the deepest leaf. No change needed.")
-        else:
-            ASCIIColors.info(f"Active branch ID '{current_active_id}' is already a leaf. No change needed.")


     def touch(self):
@@ -925,6 +929,18 @@ class LollmsDiscussion:
         else:
             kwargs['sender_type'] = 'assistant'

+        # --- NEW PARTICIPANT LOGIC ---
+        if kwargs.get('sender_type') == 'user':
+            sender_name = kwargs.get('sender')
+            sender_icon = kwargs.get('sender_icon')
+            if sender_name:
+                if self.participants is None:
+                    self.participants = {}
+                # Update only if not present or icon is missing
+                if sender_name not in self.participants or self.participants[sender_name].get('icon') is None:
+                    self.participants[sender_name] = {"icon": sender_icon}
+                self.touch()
+        # --- END NEW PARTICIPANT LOGIC ---

         message_data = {
             'id': msg_id,
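
The block above records an icon per named user the first time they post. A hedged sketch of the resulting behaviour; how the `LollmsDiscussion` instance is obtained is not shown, and the icon string is a placeholder.

```python
discussion = ...  # an existing LollmsDiscussion instance (construction not shown)

discussion.add_message(
    sender="alice",
    sender_type="user",
    sender_icon="data:image/png;base64,...",  # placeholder icon payload
    content="Hello there!",
)

# The discussion now keeps a participants entry such as:
#   {"alice": {"icon": "data:image/png;base64,..."}}
# Later calls only overwrite it while the stored icon is still None.
```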
@@ -1000,6 +1016,13 @@ class LollmsDiscussion:
         self._rebuild_message_index() # Ensure index is fresh
         return [LollmsMessage(self, msg_obj) for msg_obj in self._message_index.values()]

+    def setMemory(self, memory:str):
+        """sets memory content
+
+        Args:
+            memory (str): _description_
+        """
+        self.memory = memory

     def get_full_data_zone(self):
         """Assembles all data zones into a single, formatted string for the prompt."""
@@ -1094,7 +1117,7 @@ class LollmsDiscussion:
         if callback:
             qg_id = callback("Generating query for dynamic personality data...", MSG_TYPE.MSG_TYPE_STEP_START, {"id": "dynamic_data_query_gen"})

-        context_for_query = self.export('markdown')
+        context_for_query = self.export('markdown', suppress_system_prompt=True)
         query_prompt = (
             "You are an expert query generator. Based on the current conversation, formulate a concise and specific query to retrieve relevant information from a knowledge base. "
             "The query will be used to fetch data that will help you answer the user's latest request.\n\n"
@@ -1155,7 +1178,7 @@ class LollmsDiscussion:
         # Step 1: Add user message, now including any images.
         if add_user_message:
             user_msg = self.add_message(
-                sender="user",
+                sender=kwargs.get("user_name", "user"),
                 sender_type="user",
                 content=user_message,
                 images=images,
@@ -1182,17 +1205,30 @@ class LollmsDiscussion:
         final_content = ""

         if is_agentic_turn:
-            prompt_for_agent = self.export("markdown", branch_tip_id if branch_tip_id else self.active_branch_id)
+            prompt_for_agent = self.export("markdown", branch_tip_id if branch_tip_id else self.active_branch_id, suppress_system_prompt=True)
             if debug:
                 ASCIIColors.cyan("\n" + "="*50 + "\n--- DEBUG: AGENTIC TURN TRIGGERED ---\n" + f"--- PROMPT FOR AGENT (from discussion history) ---\n{prompt_for_agent}\n" + "="*50 + "\n")
-
+
+
+            # Combine system prompt and data zones
+            system_prompt_part = (self._system_prompt or "").strip()
+            data_zone_part = self.get_full_data_zone() # This now returns a clean, multi-part block or an empty string
+            full_system_prompt = ""
+
+            # Combine them intelligently
+            if system_prompt_part and data_zone_part:
+                full_system_prompt = f"{system_prompt_part}\n\n{data_zone_part}"
+            elif system_prompt_part:
+                full_system_prompt = system_prompt_part
+            else:
+                full_system_prompt = data_zone_part
             agent_result = self.lollmsClient.generate_with_mcp_rag(
                 prompt=prompt_for_agent,
                 use_mcps=effective_use_mcps,
                 use_data_store=use_data_store,
                 max_reasoning_steps=max_reasoning_steps,
                 images=images,
-                system_prompt = self._system_prompt,
+                system_prompt = full_system_prompt,
                 debug=debug,
                 **kwargs
             )
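
The system-prompt/data-zone merge performed here (and already present in `export()`, see the hunks below) can be restated as the following standalone helper, assuming both inputs are plain, possibly empty strings. This is an illustration, not part of the package.

```python
def combine_system_prompt(system_prompt_part: str, data_zone_part: str) -> str:
    """Standalone restatement of the merge logic above (illustration only)."""
    system_prompt_part = (system_prompt_part or "").strip()
    if system_prompt_part and data_zone_part:
        # Both present: system prompt first, then the data zones.
        return f"{system_prompt_part}\n\n{data_zone_part}"
    # Otherwise return whichever part is non-empty (possibly an empty string).
    return system_prompt_part or data_zone_part
```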
@@ -1403,7 +1439,7 @@ class LollmsDiscussion:
         self.touch() # Mark for update and auto-save if configured
         print(f"Branch starting at {message_id} ({len(messages_to_delete_ids)} messages) removed. New active branch: {self.active_branch_id}")

-    def export(self, format_type: str, branch_tip_id: Optional[str] = None, max_allowed_tokens: Optional[int] = None) -> Union[List[Dict], str]:
+    def export(self, format_type: str, branch_tip_id: Optional[str] = None, max_allowed_tokens: Optional[int] = None, suppress_system_prompt=False) -> Union[List[Dict], str]:
         """Exports the discussion history into a specified format.

         This method can format the conversation for different backends like OpenAI,
@@ -1441,12 +1477,13 @@ class LollmsDiscussion:
         full_system_prompt = ""

         # Combine them intelligently
-        if system_prompt_part and data_zone_part:
-            full_system_prompt = f"{system_prompt_part}\n\n{data_zone_part}"
-        elif system_prompt_part:
-            full_system_prompt = system_prompt_part
-        else:
-            full_system_prompt = data_zone_part
+        if not suppress_system_prompt:
+            if system_prompt_part and data_zone_part:
+                full_system_prompt = f"{system_prompt_part}\n\n{data_zone_part}"
+            elif system_prompt_part:
+                full_system_prompt = system_prompt_part
+            else:
+                full_system_prompt = data_zone_part


         participants = self.participants or {}
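
With the new `suppress_system_prompt` flag, callers can export just the conversation turns, which is how the agentic path above avoids sending the system prompt and data zones twice. A hedged usage sketch; the `"markdown"` format name appears in this diff, other arguments stay at their defaults.

```python
discussion = ...  # an existing LollmsDiscussion instance (construction not shown)

full_context = discussion.export("markdown")                               # system prompt + data zones included
history_only = discussion.export("markdown", suppress_system_prompt=True)  # conversation turns only
```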
@@ -2368,7 +2405,7 @@ class LollmsDiscussion:
         return self.add_artefact(
             title, content=new_content, images=new_images,
             audios=latest_artefact.get("audios", []),videos=latest_artefact.get("videos", []),
-            zip_content=latest_artefact.get("zip"), version=latest_version + 1, **extra_data
+            zip_content=latest_artefact.get("zip"), **extra_data
         )

     def load_artefact_into_data_zone(self, title: str, version: Optional[int] = None):
@@ -2585,4 +2622,4 @@ class LollmsDiscussion:
         if db_manager:
             new_discussion.commit()

-        return new_discussion
+        return new_discussion