lollms-client 1.7.10__py3-none-any.whl → 1.8.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- lollms_client/__init__.py +1 -1
- lollms_client/llm_bindings/claude/__init__.py +0 -1
- lollms_client/llm_bindings/grok/__init__.py +0 -1
- lollms_client/llm_bindings/llama_cpp_server/__init__.py +726 -0
- lollms_client/llm_bindings/ollama/__init__.py +40 -2
- lollms_client/lollms_discussion.py +209 -65
- lollms_client/lollms_llm_binding.py +15 -1
- lollms_client/lollms_mcp_binding.py +15 -3
- lollms_client/lollms_stt_binding.py +16 -2
- lollms_client/lollms_tti_binding.py +16 -2
- lollms_client/lollms_ttm_binding.py +16 -2
- lollms_client/lollms_tts_binding.py +16 -2
- lollms_client/lollms_ttv_binding.py +16 -2
- lollms_client/tti_bindings/diffusers/__init__.py +132 -79
- lollms_client/tti_bindings/diffusers/server/main.py +76 -65
- lollms_client/tti_bindings/open_router/__init__.py +341 -0
- lollms_client/tts_bindings/xtts/__init__.py +1 -1
- {lollms_client-1.7.10.dist-info → lollms_client-1.8.3.dist-info}/METADATA +1 -1
- {lollms_client-1.7.10.dist-info → lollms_client-1.8.3.dist-info}/RECORD +22 -21
- lollms_client/llm_bindings/llamacpp/__init__.py +0 -1155
- {lollms_client-1.7.10.dist-info → lollms_client-1.8.3.dist-info}/WHEEL +0 -0
- {lollms_client-1.7.10.dist-info → lollms_client-1.8.3.dist-info}/licenses/LICENSE +0 -0
- {lollms_client-1.7.10.dist-info → lollms_client-1.8.3.dist-info}/top_level.txt +0 -0
lollms_client/llm_bindings/ollama/__init__.py

@@ -7,7 +7,7 @@ from lollms_client.lollms_types import MSG_TYPE
 # from lollms_client.lollms_utilities import encode_image
 from lollms_client.lollms_types import ELF_COMPLETION_FORMAT
 from lollms_client.lollms_discussion import LollmsDiscussion
-from typing import Optional, Callable, List, Union, Dict
+from typing import Optional, Callable, List, Union, Dict, Any
 
 from ascii_colors import ASCIIColors, trace_exception
 import pipmaster as pm
@@ -718,6 +718,44 @@ class OllamaBinding(LollmsLLMBinding):
             trace_exception(ex)
             return {"status": False, "message": msg}
 
+    def get_zoo(self) -> List[Dict[str, Any]]:
+        """
+        Returns a list of models available for download.
+        each entry is a dict with:
+        name, description, size, type, link
+        """
+        return [
+            {"name": "Llama3 8B", "description": "Meta's Llama 3 8B model. Good for general purpose chat.", "size": "4.7GB", "type": "model", "link": "llama3"},
+            {"name": "Llama3 70B", "description": "Meta's Llama 3 70B model. High capability.", "size": "40GB", "type": "model", "link": "llama3:70b"},
+            {"name": "Phi-3 Mini", "description": "Microsoft's Phi-3 Mini 3.8B model. Lightweight and capable.", "size": "2.3GB", "type": "model", "link": "phi3"},
+            {"name": "Phi-3 Medium", "description": "Microsoft's Phi-3 Medium 14B model.", "size": "7.9GB", "type": "model", "link": "phi3:medium"},
+            {"name": "Mistral 7B", "description": "Mistral AI's 7B model v0.3.", "size": "4.1GB", "type": "model", "link": "mistral"},
+            {"name": "Mixtral 8x7B", "description": "Mistral AI's Mixture of Experts model.", "size": "26GB", "type": "model", "link": "mixtral"},
+            {"name": "Gemma 2 9B", "description": "Google's Gemma 2 9B model.", "size": "5.4GB", "type": "model", "link": "gemma2"},
+            {"name": "Gemma 2 27B", "description": "Google's Gemma 2 27B model.", "size": "16GB", "type": "model", "link": "gemma2:27b"},
+            {"name": "Qwen 2.5 7B", "description": "Alibaba Cloud's Qwen2.5 7B model.", "size": "4.5GB", "type": "model", "link": "qwen2.5"},
+            {"name": "Qwen 2.5 Coder 7B", "description": "Alibaba Cloud's Qwen2.5 Coder 7B model.", "size": "4.5GB", "type": "model", "link": "qwen2.5-coder"},
+            {"name": "CodeLlama 7B", "description": "Meta's CodeLlama 7B model.", "size": "3.8GB", "type": "model", "link": "codellama"},
+            {"name": "LLaVA 7B", "description": "Visual instruction tuning model (Vision).", "size": "4.5GB", "type": "model", "link": "llava"},
+            {"name": "Nomic Embed Text", "description": "A high-performing open embedding model.", "size": "274MB", "type": "embedding", "link": "nomic-embed-text"},
+            {"name": "DeepSeek Coder V2", "description": "DeepSeek Coder V2 model.", "size": "8.9GB", "type": "model", "link": "deepseek-coder-v2"},
+            {"name": "OpenHermes 2.5 Mistral", "description": "High quality finetune of Mistral 7B.", "size": "4.1GB", "type": "model", "link": "openhermes"},
+            {"name": "Dolphin Phi", "description": "Uncensored Dolphin fine-tune of Phi-2.", "size": "1.6GB", "type": "model", "link": "dolphin-phi"},
+            {"name": "TinyLlama", "description": "A compact 1.1B model.", "size": "637MB", "type": "model", "link": "tinyllama"},
+        ]
+
+    def download_from_zoo(self, index: int, progress_callback: Callable[[dict], None] = None) -> dict:
+        """
+        Downloads a model from the zoo using its index.
+        """
+        zoo = self.get_zoo()
+        if index < 0 or index >= len(zoo):
+            msg = "Index out of bounds"
+            ASCIIColors.error(msg)
+            return {"status": False, "message": msg}
+        item = zoo[index]
+        return self.pull_model(item["link"], progress_callback=progress_callback)
+
     def install_ollama(self, callback: Callable[[dict], None] = None, **kwargs) -> dict:
         """
         Installs Ollama based on the operating system.
@@ -1157,4 +1195,4 @@ if __name__ == '__main__':
         ASCIIColors.error(f"An error occurred during testing: {e}")
         trace_exception(e)
 
-    ASCIIColors.yellow("\nOllamaBinding test finished.")
+    ASCIIColors.yellow("\nOllamaBinding test finished.")
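Together these hunks give the Ollama binding a small built-in model zoo that a front-end can list and pull from by index. A minimal usage sketch, assuming an already-configured OllamaBinding instance named `binding` (the progress dicts are whatever Ollama's pull stream emits, forwarded through `pull_model`):

def print_progress(event: dict) -> None:
    # Ollama pull events arrive as dicts (typically with status/completed/total keys).
    print(event)

for i, entry in enumerate(binding.get_zoo()):
    print(f"[{i}] {entry['name']} ({entry['size']}) -> ollama tag '{entry['link']}'")

result = binding.download_from_zoo(2, progress_callback=print_progress)  # index 2 is "Phi-3 Mini"
if not result.get("status"):
    print("Download failed:", result.get("message"))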
lollms_client/lollms_discussion.py

@@ -34,8 +34,7 @@ if False:
 
 from lollms_client.lollms_utilities import build_image_dicts, robust_json_parser
 from ascii_colors import ASCIIColors, trace_exception
-
-# from .lollms_types import MSG_TYPE
+from lollms_client.lollms_types import MSG_TYPE
 
 class EncryptedString(TypeDecorator):
     """A SQLAlchemy TypeDecorator for field-level database encryption.
@@ -472,30 +471,156 @@ class LollmsMessage:
             if i < len(self.active_images) and self.active_images[i]
         ]
 
+    def _sync_active_images_flags(self):
+        """Re-calculates active_images boolean list based on image_groups metadata."""
+        current_images = self.images or []
+        if not current_images:
+            self.active_images = []
+            return
+
+        metadata = self.metadata or {}
+        # Support legacy key "image_generation_groups" by merging or checking both
+        groups = metadata.get("image_groups", []) + metadata.get("image_generation_groups", [])
+
+        new_active_flags = [False] * len(current_images)
+
+        grouped_indices = set()
+
+        for group in groups:
+            indices = group.get("indices", [])
+            for i in indices:
+                grouped_indices.add(i)
+
+            # Group is active by default unless explicitly False
+            is_group_active = group.get("is_active", True)
+
+            if is_group_active:
+                main_idx = group.get("main_image_index")
+                # Fallback: if main_idx invalid, use first index
+                if main_idx is None or main_idx not in indices:
+                    if indices:
+                        main_idx = indices[0]
+
+                if main_idx is not None and 0 <= main_idx < len(new_active_flags):
+                    new_active_flags[main_idx] = True
+
+        # Legacy/Ungrouped images are active by default
+        for i in range(len(current_images)):
+            if i not in grouped_indices:
+                new_active_flags[i] = True
+
+        self.active_images = new_active_flags
+
     def toggle_image_activation(self, index: int, active: Optional[bool] = None):
         """
         Toggles or sets the activation status of an image at a given index.
-        This
+        This handles groups/packs: if the image belongs to a group, enabling it
+        will disable others in that group and set it as main.
 
         Args:
             index: The index of the image in the 'images' list.
-            active: If provided, sets the status to this boolean. If None, toggles
+            active: If provided, sets the status to this boolean. If None, toggles.
         """
-
-
-
-
-
-
+        metadata = (self.metadata or {}).copy()
+        groups = metadata.get("image_groups", []) + metadata.get("image_generation_groups", [])
+
+        target_group = next((g for g in groups if index in g.get("indices", [])), None)
+
+        if target_group:
+            # If explicit active state provided
+            if active is not None:
+                if active:
+                    target_group["is_active"] = True
+                    target_group["main_image_index"] = index
+                else:
+                    # If setting specific image to inactive
+                    if target_group.get("main_image_index") == index:
+                        target_group["is_active"] = False
+            else:
+                # Toggle logic
+                # If clicking the currently active main image -> toggle group active state
+                if target_group.get("main_image_index") == index:
+                    target_group["is_active"] = not target_group.get("is_active", True)
+                else:
+                    # Select new image and ensure group is active
+                    target_group["main_image_index"] = index
+                    target_group["is_active"] = True
+
+            self.metadata = metadata
+            self._sync_active_images_flags()
+
         else:
-
+            # Ungrouped image - wrap in a new single-item group to persist state
+            new_group = {
+                "id": str(uuid.uuid4()),
+                "type": "upload",
+                "indices": [index],
+                "created_at": datetime.utcnow().isoformat(),
+                "main_image_index": index,
+                "is_active": active if active is not None else not (self.active_images and self.active_images[index])
+            }
+            if "image_groups" not in metadata:
+                metadata["image_groups"] = []
+            metadata["image_groups"].append(new_group)
+
+            self.metadata = metadata
+            self._sync_active_images_flags()
 
-        if
-
-
-
+        if self._discussion._is_db_backed:
+            self._discussion.commit()
+
+    def add_image_pack(self, images: List[str], group_type: str = "generated", active_by_default: bool = True, title: str = None) -> None:
+        """
+        Adds a list of images as a new pack/group.
+
+        - The new images are appended to the existing images list.
+        - A new group entry is added to 'image_groups' metadata with 'main_image_index' and 'is_active'.
+
+        Args:
+            images: List of base64 image strings.
+            group_type: Type label for the group (e.g., 'generated', 'upload').
+            active_by_default: If True, the pack is activated.
+            title: Optional title for the image pack (e.g., the prompt).
+        """
+        if not images:
+            return
+
+        current_images = self.images or []
+        start_index = len(current_images)
+
+        # Append new images
+        current_images.extend(images)
+        self.images = current_images
+
+        # Update Metadata with Group info
+        metadata = (self.metadata or {}).copy()
+        groups = metadata.get("image_groups", [])
+
+        new_indices = list(range(start_index, start_index + len(images)))
+
+        # Default main image is the first one in the pack
+        main_image_idx = new_indices[0] if new_indices else None
+
+        group_entry = {
+            "id": str(uuid.uuid4()),
+            "type": group_type,
+            "indices": new_indices,
+            "created_at": datetime.utcnow().isoformat(),
+            "main_image_index": main_image_idx,
+            "is_active": active_by_default
+        }
+
+        if title:
+            group_entry["title"] = title
+
+        groups.append(group_entry)
+
+        metadata["image_groups"] = groups
+        self.metadata = metadata
+
+        # Sync active_images list based on new group data
+        self._sync_active_images_flags()
 
-        self.active_images = new_active_images
         if self._discussion._is_db_backed:
             self._discussion.commit()
 
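A short sketch of how the new grouping API behaves, assuming `msg` is a LollmsMessage with no prior images and `img_a`/`img_b`/`img_c` are base64-encoded image strings:

# Add three generated candidates as one pack; only the main image (the first, by default) is active.
msg.add_image_pack([img_a, img_b, img_c], group_type="generated", title="a red fox, watercolor")
print(msg.active_images)        # [True, False, False]

# Selecting another image in the pack makes it the main image and keeps the pack active.
msg.toggle_image_activation(2)
print(msg.active_images)        # [False, False, True]

# Toggling the current main image again deactivates the whole pack.
msg.toggle_image_activation(2)
print(msg.active_images)        # [False, False, False]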
@@ -963,11 +1088,34 @@ class LollmsDiscussion:
         else:
             new_msg_orm = SimpleNamespace(**message_data)
         self._db_discussion.messages.append(new_msg_orm)
+
+        # Wrap the new message immediately to perform updates
+        wrapped_msg = LollmsMessage(self, new_msg_orm)
+
+        # Automatically group user uploaded images if present and not already grouped
+        images_list = kwargs.get('images', [])
+        if images_list and kwargs.get('sender_type') == 'user':
+            meta = wrapped_msg.metadata or {}
+            # Check if groups already exist (e.g. if loaded from JSON backup)
+            if not meta.get('image_groups'):
+                groups = []
+                for i in range(len(images_list)):
+                    groups.append({
+                        "id": str(uuid.uuid4()),
+                        "type": "upload",
+                        "indices": [i],
+                        "created_at": datetime.utcnow().isoformat(),
+                        "main_image_index": i,
+                        "is_active": True
+                    })
+                meta["image_groups"] = groups
+                wrapped_msg.metadata = meta
+                wrapped_msg._sync_active_images_flags()
 
         self.active_branch_id = msg_id # New message is always a leaf
         self._message_index[msg_id] = new_msg_orm
         self.touch()
-        return
+        return wrapped_msg
 
     def get_branch(self, leaf_id: Optional[str]) -> List[LollmsMessage]:
         """Traces a branch of the conversation from a leaf message back to the root.
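`add_message` used to return nothing; it now returns the wrapped LollmsMessage, and user uploads are auto-grouped on the way in. A minimal sketch, assuming `discussion` is a LollmsDiscussion and `b64_png` is a base64 image string:

msg = discussion.add_message(
    sender="user",
    sender_type="user",
    content="What is in this picture?",
    images=[b64_png],
)
# Each uploaded image gets its own single-image group, active by default.
print(msg.metadata["image_groups"][0]["type"])   # "upload"
print(msg.active_images)                         # [True]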
@@ -1054,7 +1202,7 @@ class LollmsDiscussion:
             debug: bool = False,
             remove_thinking_blocks:bool = True,
             **kwargs
-            ) -> Dict[str,
+            ) -> Dict[str, Any]:
         """Main interaction method that can invoke the dynamic, multi-modal agent.
 
         This method orchestrates the entire response generation process. It can
@@ -1095,14 +1243,37 @@ class LollmsDiscussion:
         where the 'ai_message' will contain rich metadata if an agentic turn was used.
         """
         callback = kwargs.get("streaming_callback")
+        collected_sources = []
+
+
+        # Step 1: Add user message, now including any images.
+        if add_user_message:
+            user_msg = self.add_message(
+                sender=kwargs.get("user_name", "user"),
+                sender_type="user",
+                content=user_message,
+                images=images,
+                **kwargs
+            )
+        else: # Regeneration logic
+            # _validate_and_set_active_branch ensures active_branch_id is valid and a leaf.
+            # So, if we are regenerating, active_branch_id must be valid.
+            if self.active_branch_id not in self._message_index: # Redundant check, but safe
+                raise ValueError("Regeneration failed: active branch tip not found or is invalid.")
+            user_msg_orm = self._message_index[self.active_branch_id]
+            if user_msg_orm.sender_type != 'user':
+                raise ValueError(f"Regeneration failed: active branch tip is a '{user_msg_orm.sender_type}' message, not 'user'.")
+            user_msg = LollmsMessage(self, user_msg_orm)
+            # FIX: Use get_active_images() to ensure we get a list of strings, not potentially objects/dicts.
+            # This prevents errors if the underlying 'images' field contains new-style structured data.
+            images = user_msg.get_active_images()
+
         # extract personality data
         if personality is not None:
             object.__setattr__(self, '_system_prompt', personality.system_prompt)
 
             # --- New Data Source Handling Logic ---
             if hasattr(personality, 'data_source') and personality.data_source is not None:
-                # Placeholder for MSG_TYPE if not imported
-                MSG_TYPE = SimpleNamespace(MSG_TYPE_STEP="step", MSG_TYPE_STEP_START="step_start", MSG_TYPE_STEP_END="step_end", MSG_TYPE_EXCEPTION="exception")
 
                 if isinstance(personality.data_source, str):
                     # --- Static Data Source ---
@@ -1154,6 +1325,15 @@ class LollmsDiscussion:
 
                     if retrieved_data:
                         self.personality_data_zone = retrieved_data.strip()
+                        source_item = {
+                            "title": "Personality Data Source",
+                            "content": retrieved_data,
+                            "source": personality.name if hasattr(personality, 'name') else "Personality",
+                            "query": generated_query
+                        }
+                        collected_sources.append(source_item)
+                        if callback:
+                            callback([source_item], MSG_TYPE.MSG_TYPE_SOURCES_LIST)
 
                 except Exception as e:
                     trace_exception(e)
@@ -1175,26 +1355,6 @@ class LollmsDiscussion:
         if self.max_context_size is not None:
             self.summarize_and_prune(self.max_context_size)
 
-        # Step 1: Add user message, now including any images.
-        if add_user_message:
-            user_msg = self.add_message(
-                sender=kwargs.get("user_name", "user"),
-                sender_type="user",
-                content=user_message,
-                images=images,
-                **kwargs
-            )
-        else: # Regeneration logic
-            # _validate_and_set_active_branch ensures active_branch_id is valid and a leaf.
-            # So, if we are regenerating, active_branch_id must be valid.
-            if self.active_branch_id not in self._message_index: # Redundant check, but safe
-                raise ValueError("Regeneration failed: active branch tip not found or is invalid.")
-            user_msg_orm = self._message_index[self.active_branch_id]
-            if user_msg_orm.sender_type != 'user':
-                raise ValueError(f"Regeneration failed: active branch tip is a '{user_msg_orm.sender_type}' message, not 'user'.")
-            user_msg = LollmsMessage(self, user_msg_orm)
-            images = user_msg.images
-
         is_agentic_turn = (effective_use_mcps is not None and effective_use_mcps) or (use_data_store is not None and use_data_store)
 
         start_time = datetime.now()
@@ -1240,7 +1400,7 @@ class LollmsDiscussion:
                 prompt_for_chat = self.export("markdown", branch_tip_id if branch_tip_id else self.active_branch_id)
                 ASCIIColors.cyan("\n" + "="*50 + f"\n--- DEBUG: SIMPLE CHAT PROMPT ---\n{prompt_for_chat}\n" + "="*50 + "\n")
 
-            final_raw_response = self.lollmsClient.chat(self, images=images, **kwargs) or ""
+            final_raw_response = self.lollmsClient.chat(self, images=images, branch_tip_id=branch_tip_id, **kwargs) or ""
 
             if debug:
                 ASCIIColors.cyan("\n" + "="*50 + f"\n--- DEBUG: RAW SIMPLE CHAT RESPONSE ---\n{final_raw_response}\n" + "="*50 + "\n")
@@ -1262,9 +1422,12 @@ class LollmsDiscussion:
         message_meta = {}
         if is_agentic_turn and isinstance(agent_result, dict):
             if "tool_calls" in agent_result: message_meta["tool_calls"] = agent_result["tool_calls"]
-            if "sources" in agent_result:
+            if "sources" in agent_result: collected_sources.extend(agent_result["sources"])
             if agent_result.get("clarification_required", False): message_meta["clarification_required"] = True
 
+        if collected_sources:
+            message_meta["sources"] = collected_sources
+
         ai_message_obj = self.add_message(
             sender=personality.name if personality else "assistant",
             sender_type="assistant",
@@ -1282,15 +1445,13 @@ class LollmsDiscussion:
         if self._is_db_backed and self.autosave:
             self.commit()
 
-        return {"user_message": user_msg, "ai_message": ai_message_obj}
+        return {"user_message": user_msg, "ai_message": ai_message_obj, "sources": collected_sources}
 
-    def regenerate_branch(self, branch_tip_id: Optional[str] = None, **kwargs) -> Dict[str,
+    def regenerate_branch(self, branch_tip_id: Optional[str] = None, **kwargs) -> Dict[str, Any]:
         """Regenerates the AI response for a given message or the active branch's AI response.
 
-
-
-        If the target is a user message, all its existing AI children are deleted, and their
-        descendants are re-parented to the user message. A new AI response is then generated.
+        Instead of deleting the old response, this method simply starts a new generation
+        from the parent message, creating a new branch (sibling to the original response).
 
         Args:
             branch_tip_id (Optional[str]): The ID of the message to regenerate from.
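Sources gathered from the personality data source and from agentic runs are now surfaced both through the streaming callback (MSG_TYPE_SOURCES_LIST) and in the return value of chat(). A hedged consumer sketch, assuming a configured LollmsDiscussion named `discussion`:

result = discussion.chat(user_message="Summarize the project roadmap")

ai_message = result["ai_message"]          # LollmsMessage created for the assistant reply
for src in result.get("sources", []):
    # Each source dict carries title/content/source, plus the query when RAG was involved.
    print("-", src.get("title"), "|", src.get("source"))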
@@ -1321,23 +1482,6 @@ class LollmsDiscussion:
         else:
             raise ValueError(f"Regeneration failed: Target message '{target_id}' is of an unexpected sender type '{target_message_orm.sender_type}'.")
 
-        ai_messages_to_overwrite_ids = set()
-        if target_message_orm.sender_type == 'assistant':
-            # If target is an AI, we only remove this specific AI message.
-            ai_messages_to_overwrite_ids.add(target_message_orm.id)
-        elif target_message_orm.sender_type == 'user':
-            # If target is a user, we remove ALL AI children of this user message.
-            for msg_obj in self._db_discussion.messages:
-                if msg_obj.parent_id == user_msg_to_regenerate_from.id and msg_obj.sender_type == 'assistant':
-                    ai_messages_to_overwrite_ids.add(msg_obj.id)
-
-        if not ai_messages_to_overwrite_ids:
-            ASCIIColors.warning(f"No AI messages found to regenerate from '{target_id}'. This might be unintended.")
-            # If no AI messages to overwrite, just proceed with generation from user message.
-            # No changes to existing messages needed, so skip the cleanup phase.
-            self.active_branch_id = user_msg_to_regenerate_from.id # Ensure active branch is correct for chat
-            return self.chat(user_message="", add_user_message=False, branch_tip_id=user_msg_to_regenerate_from.id, **kwargs)
-
         # --- Phase 1: Generate new AI response ---
         # The user message for the new generation is user_msg_to_regenerate_from
         self.active_branch_id = user_msg_to_regenerate_from.id
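The practical consequence of the regenerate_branch rewrite: nothing is deleted anymore, so every regeneration grows a sibling AI message under the same user message and the discussion keeps all alternatives as branches. A rough sketch of what a caller sees (attribute names such as `content` on the returned message wrappers are assumed here):

first = discussion.chat(user_message="Give me a haiku about autumn")
second = discussion.regenerate_branch()    # new sibling branch; the first answer is kept

# Both answers now exist side by side; the active branch points at the newest one.
print(first["ai_message"].content)
print(second["ai_message"].content)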
lollms_client/lollms_llm_binding.py

@@ -2,7 +2,7 @@
 from abc import abstractmethod
 import importlib
 from pathlib import Path
-from typing import Optional, Callable, List, Union, Dict
+from typing import Optional, Callable, List, Union, Dict, Any
 from ascii_colors import trace_exception, ASCIIColors
 from lollms_client.lollms_types import MSG_TYPE
 from lollms_client.lollms_discussion import LollmsDiscussion
@@ -212,6 +212,20 @@ class LollmsLLMBinding(LollmsBaseBinding):
         """
         pass
 
+    def get_zoo(self) -> List[Dict[str, Any]]:
+        """
+        Returns a list of models available for download.
+        each entry is a dict with:
+        name, description, size, type, link
+        """
+        return []
+
+    def download_from_zoo(self, index: int, progress_callback: Callable[[dict], None] = None) -> dict:
+        """
+        Downloads a model from the zoo using its index.
+        """
+        return {"status": False, "message": "Not implemented"}
+
     @abstractmethod
     def load_model(self, model_name: str) -> bool:
         """
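The same get_zoo()/download_from_zoo() pair is added, with no-op defaults, to each binding base class below (MCP, STT, TTI, TTM, TTS), so callers can probe any binding uniformly without type checks. A small sketch of that pattern, assuming `binding` is an instance of any of these binding classes:

def show_zoo(binding) -> None:
    zoo = binding.get_zoo()                 # [] unless the binding overrides it (e.g. the Ollama binding)
    if not zoo:
        print(f"{binding.__class__.__name__}: no downloadable models advertised.")
        return
    for i, entry in enumerate(zoo):
        print(f"[{i}] {entry['name']} ({entry['size']}) -> {entry['link']}")

def pull_from_zoo(binding, index: int) -> None:
    result = binding.download_from_zoo(index)   # default: {"status": False, "message": "Not implemented"}
    print("OK" if result.get("status") else f"Failed: {result.get('message')}")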
lollms_client/lollms_mcp_binding.py

@@ -2,7 +2,7 @@
 from abc import abstractmethod
 import importlib
 from pathlib import Path
-from typing import Optional, List, Dict, Any, Union
+from typing import Optional, List, Dict, Any, Union, Callable
 from ascii_colors import trace_exception, ASCIIColors
 import yaml
 from lollms_client.lollms_base_binding import LollmsBaseBinding
@@ -52,6 +52,20 @@ class LollmsMCPBinding(LollmsBaseBinding):
         Or could be implemented to return available tools as models if desired.
         """
         return []
+
+    def get_zoo(self) -> List[Dict[str, Any]]:
+        """
+        Returns a list of models available for download.
+        each entry is a dict with:
+        name, description, size, type, link
+        """
+        return []
+
+    def download_from_zoo(self, index: int, progress_callback: Callable[[dict], None] = None) -> dict:
+        """
+        Downloads a model from the zoo using its index.
+        """
+        return {"status": False, "message": "Not implemented"}
 
 class LollmsMCPBindingManager:
     """
@@ -67,7 +81,6 @@ class LollmsMCPBindingManager:
         self.mcp_bindings_dir = (Path(__file__).parent.parent / mcp_bindings_dir).resolve()
 
         self.available_bindings: Dict[str, type[LollmsMCPBinding]] = {}
-        ASCIIColors.info(f"LollmsMCPBindingManager initialized. Bindings directory: {self.mcp_bindings_dir}")
 
 
     def _load_binding_class(self, binding_name: str) -> Optional[type[LollmsMCPBinding]]:
@@ -127,7 +140,6 @@ class LollmsMCPBindingManager:
         if binding_class_to_instantiate:
             try:
                 return binding_class_to_instantiate(
-                    binding_name=binding_name,
                     **kwargs
                 )
             except Exception as e:
lollms_client/lollms_stt_binding.py

@@ -2,7 +2,7 @@
 from abc import abstractmethod
 import importlib
 from pathlib import Path
-from typing import Optional, List, Dict, Any, Union
+from typing import Optional, List, Dict, Any, Union, Callable
 from ascii_colors import trace_exception
 import yaml
 from lollms_client.lollms_base_binding import LollmsBaseBinding
@@ -32,6 +32,20 @@ class LollmsSTTBinding(LollmsBaseBinding):
         """
         pass
 
+    def get_zoo(self) -> List[Dict[str, Any]]:
+        """
+        Returns a list of models available for download.
+        each entry is a dict with:
+        name, description, size, type, link
+        """
+        return []
+
+    def download_from_zoo(self, index: int, progress_callback: Callable[[dict], None] = None) -> dict:
+        """
+        Downloads a model from the zoo using its index.
+        """
+        return {"status": False, "message": "Not implemented"}
+
 class LollmsSTTBindingManager:
     """Manages STT binding discovery and instantiation."""
 
@@ -74,7 +88,7 @@ class LollmsSTTBindingManager:
                     "service_key": service_key,
                     "verify_ssl_certificate": verify_ssl_certificate
                 })
-                return binding_class(
+                return binding_class(**kwargs)
             except Exception as e:
                 trace_exception(e)
                 print(f"Failed to instantiate STT binding {binding_name}: {str(e)}")
lollms_client/lollms_tti_binding.py

@@ -1,7 +1,7 @@
 from abc import abstractmethod
 import importlib
 from pathlib import Path
-from typing import Optional, List, Dict, Any, Union
+from typing import Optional, List, Dict, Any, Union, Callable
 from ascii_colors import trace_exception
 import yaml
 from lollms_client.lollms_base_binding import LollmsBaseBinding
@@ -57,6 +57,20 @@ class LollmsTTIBinding(LollmsBaseBinding):
     def list_models(self) -> list:
         """Lists models"""
         pass
+
+    def get_zoo(self) -> List[Dict[str, Any]]:
+        """
+        Returns a list of models available for download.
+        each entry is a dict with:
+        name, description, size, type, link
+        """
+        return []
+
+    def download_from_zoo(self, index: int, progress_callback: Callable[[dict], None] = None) -> dict:
+        """
+        Downloads a model from the zoo using its index.
+        """
+        return {"status": False, "message": "Not implemented"}
 
     @abstractmethod
     def set_settings(self, settings: Dict[str, Any], **kwargs) -> bool:
@@ -91,7 +105,7 @@ class LollmsTTIBindingManager:
         binding_class = self.available_bindings.get(binding_name)
         if binding_class:
             try:
-                return binding_class(
+                return binding_class(**kwargs)
             except Exception as e:
                 trace_exception(e)
                 print(f"Failed to instantiate TTI binding {binding_name}: {str(e)}")
lollms_client/lollms_ttm_binding.py

@@ -2,7 +2,7 @@
 from abc import abstractmethod
 import importlib
 from pathlib import Path
-from typing import Optional, List, Dict, Any, Union
+from typing import Optional, List, Dict, Any, Union, Callable
 from ascii_colors import trace_exception
 from lollms_client.lollms_base_binding import LollmsBaseBinding
 
@@ -32,6 +32,20 @@ class LollmsTTMBinding(LollmsBaseBinding):
         """
         pass
 
+    def get_zoo(self) -> List[Dict[str, Any]]:
+        """
+        Returns a list of models available for download.
+        each entry is a dict with:
+        name, description, size, type, link
+        """
+        return []
+
+    def download_from_zoo(self, index: int, progress_callback: Callable[[dict], None] = None) -> dict:
+        """
+        Downloads a model from the zoo using its index.
+        """
+        return {"status": False, "message": "Not implemented"}
+
 class LollmsTTMBindingManager:
     """Manages TTM binding discovery and instantiation."""
 
@@ -63,7 +77,7 @@ class LollmsTTMBindingManager:
         binding_class = self.available_bindings.get(binding_name)
         if binding_class:
             try:
-                return binding_class(
+                return binding_class(**kwargs)
             except Exception as e:
                 trace_exception(e)
                 print(f"Failed to instantiate TTM binding {binding_name}: {str(e)}")
lollms_client/lollms_tts_binding.py

@@ -1,7 +1,7 @@
 from abc import abstractmethod
 import importlib
 from pathlib import Path
-from typing import Optional, List, Dict, Any, Union
+from typing import Optional, List, Dict, Any, Union, Callable
 import yaml
 from ascii_colors import trace_exception
 from lollms_client.lollms_base_binding import LollmsBaseBinding
@@ -27,7 +27,21 @@ class LollmsTTSBinding(LollmsBaseBinding):
     @abstractmethod
     def list_models(self, **kwargs) -> List[str]:
         pass
+
+    def get_zoo(self) -> List[Dict[str, Any]]:
+        """
+        Returns a list of models available for download.
+        each entry is a dict with:
+        name, description, size, type, link
+        """
+        return []
 
+    def download_from_zoo(self, index: int, progress_callback: Callable[[dict], None] = None) -> dict:
+        """
+        Downloads a model from the zoo using its index.
+        """
+        return {"status": False, "message": "Not implemented"}
+
     def get_settings(self, **kwargs) -> Dict[str, Any]:
         return self.settings
 
@@ -61,7 +75,7 @@ class LollmsTTSBindingManager:
 
         binding_class = self.available_bindings.get(binding_name)
         if binding_class:
-            return binding_class(
+            return binding_class(**kwargs)
         return None
 
 