lollms-client 0.21.0-py3-none-any.whl → 0.22.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of lollms-client has been flagged as possibly problematic.

@@ -1,43 +1,139 @@
- # research_app_final.py
-
  import os
+ import re
+ import yaml
  import json
- import shutil
  from pathlib import Path
- # Use the correct, specified import style
- from lollms_client import LollmsClient, LollmsDiscussion, MSG_TYPE, DatabaseManager
+ from collections import defaultdict
+ from typing import Dict, Optional
+
+ # --- Mock RAG Backend (for demonstration purposes) ---
+ # In a real app, this would be a proper vector database (ChromaDB, FAISS, etc.)
+ MOCK_VECTOR_DB_PATH = Path("./rag_db")
+ MOCK_VECTOR_DB_PATH.mkdir(exist_ok=True)
+
+ def mock_vectorize_chunk(chunk_text: str, chunk_id: str):
+     # Simulate vectorization by just saving the chunk text to a file.
+     # A real implementation would convert chunk_text to a vector and store it.
+     (MOCK_VECTOR_DB_PATH / f"{chunk_id}.json").write_text(json.dumps({
+         "id": chunk_id,
+         "text": chunk_text
+     }, indent=2))
+
+ def mock_is_vectorized(chunk_id: str) -> bool:
+     return (MOCK_VECTOR_DB_PATH / f"{chunk_id}.json").exists()
+
+ def mock_query_rag(user_query: str) -> str:
+     # Simulate RAG by doing a simple keyword search across all chunk files.
+     # A real implementation would do a vector similarity search.
+     relevant_chunks = []
+     query_words = set(user_query.lower().split())
+     if not query_words:
+         return ""
+
+     for file in MOCK_VECTOR_DB_PATH.glob("*.json"):
+         data = json.loads(file.read_text(encoding='utf-8'))
+         if any(word in data["text"].lower() for word in query_words):
+             relevant_chunks.append(data["text"])
+
+     if not relevant_chunks:
+         return ""
+
+     return "\n---\n".join(relevant_chunks)
+
+ # --- Library Imports ---
+ # Assumes lollms_client.py, lollms_discussion.py, and lollms_personality.py are in the same directory or accessible in PYTHONPATH
+ from lollms_client import LollmsClient, MSG_TYPE
+ from lollms_client.lollms_discussion import LollmsDiscussion, LollmsDataManager
+ from lollms_client.lollms_personality import LollmsPersonality
  from ascii_colors import ASCIIColors
  from sqlalchemy import Column, String
+ from sqlalchemy.exc import IntegrityError

- # --- 1. Define Application-Specific Schema ---
- # We define our custom fields for the database tables.
- # This allows applications to store and query their own metadata.
+ # --- Application-Specific Schema ---
  class ResearchDiscussionMixin:
-     # We want each discussion to have a 'project_name' that we can search for.
-     project_name = Column(String(100), index=True, nullable=False)
+     project_name = Column(String(100), index=True, nullable=False, unique=True)

  class ResearchMessageMixin:
-     # This mixin is empty for this example.
-     pass
+     pass # No custom fields needed for this demo
+
+ # --- Personality Management ---
+ def load_personalities(personalities_path: Path) -> Dict[str, LollmsPersonality]:
+     """Loads all personalities from a directory of YAML files."""
+     personalities = {}
+     if not personalities_path.is_dir():
+         return {}
+
+     for file_path in personalities_path.glob("*.yaml"):
+         try:
+             config = yaml.safe_load(file_path.read_text(encoding='utf-8'))
+
+             script_content = None
+             script_path = file_path.with_suffix(".py")
+             if script_path.exists():
+                 script_content = script_path.read_text(encoding='utf-8')
+
+             # Make data file paths relative to the personalities folder
+             data_files = [personalities_path / f for f in config.get("data_files", [])]
+
+             personality = LollmsPersonality(
+                 name=config.get("name", file_path.stem),
+                 author=config.get("author", "Unknown"),
+                 category=config.get("category", "General"),
+                 description=config.get("description", ""),
+                 system_prompt=config.get("system_prompt", "You are a helpful AI."),
+                 data_files=data_files,
+                 script=script_content,
+                 vectorize_chunk_callback=mock_vectorize_chunk,
+                 is_vectorized_callback=mock_is_vectorized,
+                 query_rag_callback=mock_query_rag
+             )
+             personalities[personality.personality_id] = personality
+         except Exception as e:
+             ASCIIColors.red(f"Failed to load personality from {file_path.name}: {e}")
+
+     return personalities

+ def select_personality(personalities: Dict[str, LollmsPersonality]) -> Optional[LollmsPersonality]:
+     """UI for selecting a personality."""
+     if not personalities:
+         ASCIIColors.yellow("No personalities found.")
+         return None
+
+     print("\n--- Select a Personality ---")
+     sorted_p = sorted(personalities.values(), key=lambda p: (p.category, p.name))
+     for i, p in enumerate(sorted_p):
+         print(f"{i+1}. {p.category}/{p.name} (by {p.author})")
+     print("0. Deselect Personality")
+
+     while True:
+         try:
+             choice_str = input("> ")
+             if not choice_str: return None
+             choice = int(choice_str)
+             if choice == 0:
+                 return None
+             if 1 <= choice <= len(sorted_p):
+                 return sorted_p[choice - 1]
+             else:
+                 ASCIIColors.red("Invalid number.")
+         except ValueError:
+             ASCIIColors.red("Please enter a number.")
+
+ # --- Main Application Logic ---
  def main():
-     # --- 2. Setup: Lollms Client is always needed ---
-     print("--- LOLLMS Research Assistant (Final Version) ---")
+     print("--- LOLLMS Advanced Agentic Framework ---")
      try:
-         # Instantiate the real LollmsClient to connect to a running model service.
-         # Ensure Ollama is running and has pulled the specified model.
          lc = LollmsClient("ollama", model_name="qwen3:4b")
          print("LollmsClient connected successfully to Ollama.")
      except Exception as e:
-         print(f"\nFATAL: Could not connect to LLM binding. Is Ollama running?\nError: {e}")
+         print(f"\nFATAL: Could not connect to LLM binding. Is the service running?\nError: {e}")
          return

-     # --- 3. Setup Database Manager ---
      DB_PATH = "sqlite:///research_projects_final.db"
-     ENCRYPTION_KEY = "a-secure-password-for-the-database"
+     ENCRYPTION_KEY = "a-very-secure-password-for-the-database"

      try:
-         db_manager = DatabaseManager(
+         db_manager = LollmsDataManager(
              db_path=DB_PATH,
              discussion_mixin=ResearchDiscussionMixin,
              message_mixin=ResearchMessageMixin,
@@ -48,160 +144,305 @@ def main():
          print(f"\nFATAL: Could not initialize database. Error: {e}")
          return

-     # --- 4. Main Application Loop ---
-     # This loop demonstrates the new management features.
-     discussion = None
+     personalities_path = Path("./personalities")
+     personalities = load_personalities(personalities_path)
+     print(f"Loaded {len(personalities)} personalities.")
+
+     discussion: Optional[LollmsDiscussion] = None
+     personality: Optional[LollmsPersonality] = None
+
      while True:
-         print("\n--- Main Menu ---")
+         print("\n" + "="*20 + " Main Menu " + "="*20)
          if discussion:
-             print(f"Current Project: '{discussion.metadata.get('project_name', discussion.id)}'")
+             p_name = f" with '{personality.name}'" if personality else ""
+             ASCIIColors.cyan(f"Current Project: '{discussion.project_name}'{p_name}")
              print("c. Chat in current project")
-             print("r. Regenerate last AI response (create new branch)")
+             print("r. Regenerate last AI response")
          print("l. List all projects")
          print("s. Search for a project")
         print("n. Start a new project")
         print("o. Open an existing project")
         print("d. Delete a project")
+         print("p. Select a Personality")
         print("e. Exit")

-         choice = input("> ").lower()
+         choice = input("> ").lower().strip()

          if choice == 'c' and discussion:
-             chat_loop(discussion)
+             chat_loop(discussion, personality)
          elif choice == 'r' and discussion:
-             regenerate_response(discussion)
+             regenerate_response(discussion, personality)
          elif choice == 'l':
              list_all_projects(db_manager)
          elif choice == 's':
              search_for_project(db_manager)
          elif choice == 'n':
-             discussion = start_new_project(lc, db_manager)
+             new_discussion = start_new_project(lc, db_manager)
+             if new_discussion: discussion = new_discussion
          elif choice == 'o':
-             discussion = open_project(lc, db_manager)
+             new_discussion = open_project(lc, db_manager)
+             if new_discussion: discussion = new_discussion
          elif choice == 'd':
              delete_project(db_manager)
-             if discussion and discussion.id not in [d['id'] for d in db_manager.list_discussions()]:
-                 discussion = None # Clear current discussion if it was deleted
+             if discussion and not db_manager.get_discussion(lc, discussion.id):
+                 discussion = None
+         elif choice == 'p':
+             personality = select_personality(personalities)
+             if personality:
+                 ASCIIColors.green(f"Personality '{personality.name}' selected.")
+             else:
+                 ASCIIColors.yellow("No personality selected.")
          elif choice == 'e':
+             if discussion: discussion.close()
              break
          else:
              ASCIIColors.red("Invalid choice.")

-     # --- Cleanup ---
-     print("\n--- Demo complete. Cleaning up. ---")
-     if os.path.exists(DB_PATH):
-         os.remove(DB_PATH)
+     print("\n--- Demo complete. Database and RAG files are preserved. ---")

- def list_all_projects(db_manager: DatabaseManager):
+ # --- UI Functions ---
+ def list_all_projects(db_manager: LollmsDataManager):
      projects = db_manager.list_discussions()
      if not projects:
          ASCIIColors.yellow("No projects found.")
          return
      print("\n--- All Projects ---")
      for p in projects:
-         print(f"- ID: {p['id']} | Name: {p.get('project_name', 'N/A')}")
+         print(f"- Name: {p['project_name']:<30} | ID: {p['id']}")

- def search_for_project(db_manager: DatabaseManager):
-     term = input("Enter search term for project name: ")
+ def search_for_project(db_manager: LollmsDataManager):
+     term = input("Enter search term for project name: ").strip()
+     if not term: return
      projects = db_manager.search_discussions(project_name=term)
      if not projects:
          ASCIIColors.yellow(f"No projects found matching '{term}'.")
          return
      print(f"\n--- Search Results for '{term}' ---")
      for p in projects:
-         print(f"- ID: {p['id']} | Name: {p.get('project_name', 'N/A')}")
+         print(f"- Name: {p['project_name']:<30} | ID: {p['id']}")

- def start_new_project(lc: LollmsClient, db_manager: DatabaseManager) -> LollmsDiscussion:
-     name = input("Enter new project name: ")
+ def start_new_project(lc: LollmsClient, db_manager: LollmsDataManager) -> Optional[LollmsDiscussion]:
+     name = input("Enter new project name: ").strip()
      if not name:
          ASCIIColors.red("Project name cannot be empty.")
          return None
-     discussion = LollmsDiscussion.create_new(
-         lollms_client=lc,
-         db_manager=db_manager,
-         autosave=True, # Recommended for interactive apps
-         project_name=name
-     )
-     discussion.system_prompt = f"This is a research project about {name}."
-     ASCIIColors.green(f"Project '{name}' created successfully.")
-     return discussion
+     try:
+         discussion = LollmsDiscussion.create_new(
+             lollms_client=lc, db_manager=db_manager,
+             autosave=True, project_name=name
+         )
+         ASCIIColors.green(f"Project '{name}' created successfully.")
+         return discussion
+     except IntegrityError:
+         ASCIIColors.red(f"Failed to create project. A project named '{name}' already exists.")
+         return None
+     except Exception as e:
+         ASCIIColors.red(f"An unexpected error occurred while creating the project: {e}")
+         return None

- def open_project(lc: LollmsClient, db_manager: DatabaseManager) -> LollmsDiscussion:
+ def open_project(lc: LollmsClient, db_manager: LollmsDataManager) -> Optional[LollmsDiscussion]:
      list_all_projects(db_manager)
-     disc_id = input("Enter project ID to open: ")
+     disc_id = input("Enter project ID to open: ").strip()
+     if not disc_id: return None
      discussion = db_manager.get_discussion(lollms_client=lc, discussion_id=disc_id, autosave=True)
      if not discussion:
-         ASCIIColors.red("Project not found.")
-         return None
-     ASCIIColors.green(f"Opened project '{discussion.metadata.get('project_name', discussion.id)}'.")
+         ASCIIColors.red("Project not found."); return None
+     ASCIIColors.green(f"Opened project '{discussion.project_name}'.")
      return discussion

- def delete_project(db_manager: DatabaseManager):
+ def delete_project(db_manager: LollmsDataManager):
      list_all_projects(db_manager)
-     disc_id = input("Enter project ID to DELETE: ")
-     confirm = input(f"Are you sure you want to permanently delete project {disc_id}? (y/N): ")
-     if confirm.lower() == 'y':
+     disc_id = input("Enter project ID to DELETE: ").strip()
+     if not disc_id: return
+     confirm = input(f"Are you sure you want to permanently delete project {disc_id}? (y/N): ").lower()
+     if confirm == 'y':
          db_manager.delete_discussion(disc_id)
          ASCIIColors.green("Project deleted.")
      else:
          ASCIIColors.yellow("Deletion cancelled.")

- def chat_loop(discussion: LollmsDiscussion):
-     """The interactive chat session for a given discussion."""
-     print("\n--- Entering Chat ---")
-
-     # Display the current branch history when entering the chat.
+ def display_branch_history(discussion: LollmsDiscussion):
      current_branch = discussion.get_branch(discussion.active_branch_id)
-     if current_branch:
-         ASCIIColors.cyan("--- Current Conversation History ---")
-         for msg in current_branch:
-             sender = msg['sender']
-             if sender == 'user':
-                 ASCIIColors.green(f"You: {msg['content']}")
-             else:
-                 ASCIIColors.blue(f"AI: {msg['content']}")
-         ASCIIColors.cyan("----------------------------------")
+     if not current_branch: return
+     ASCIIColors.cyan("\n--- Current Conversation History (Active Branch) ---")
+     for msg in current_branch:
+         if msg.sender_type == 'user':
+             ASCIIColors.green(f"\nYou: {msg.content}")
+         else:
+             ASCIIColors.blue(f"\nAI: {msg.content}")
+             speed_str = f"{msg.generation_speed:.1f} t/s" if msg.generation_speed is not None else "N/A"
+             ASCIIColors.dim(f" [Model: {msg.model_name}, Tokens: {msg.tokens}, Speed: {speed_str}]")
+             if msg.thoughts:
+                 ASCIIColors.dark_gray(f" [Thoughts: {msg.thoughts[:100]}...]")
+             if msg.scratchpad:
+                 ASCIIColors.yellow(f" [Scratchpad: {msg.scratchpad[:100]}...]")
+     ASCIIColors.cyan("-----------------------------------------------------")
+
+ def display_message_tree(discussion: LollmsDiscussion):
+     print("\n--- Project Message Tree ---")
+     messages_by_id = {msg.id: msg for msg in discussion.messages}
+     children_map = defaultdict(list)
+     root_ids = []
+     for msg in messages_by_id.values():
+         if msg.parent_id and msg.parent_id in messages_by_id:
+             children_map[msg.parent_id].append(msg.id)
+         else:
+             root_ids.append(msg.id)
+     def print_node(msg_id, indent=""):
+         msg = messages_by_id.get(msg_id)
+         if not msg: return
+         is_active = " (*)" if msg.id == discussion.active_branch_id else ""
+         color = ASCIIColors.green if msg.sender_type == "user" else ASCIIColors.blue
+         content_preview = re.sub(r'\s+', ' ', msg.content).strip()[:50] + "..."
+         color(f"{indent}├─ {msg.id[-8:]}{is_active} ({msg.sender}): {content_preview}")
+         for child_id in children_map.get(msg_id, []):
+             print_node(child_id, indent + " ")
+     for root_id in root_ids:
+         print_node(root_id)
+     print("----------------------------")
+
+ def handle_config_command(discussion: LollmsDiscussion):
+     while True:
+         ASCIIColors.cyan("\n--- Thought Configuration ---")
+         ASCIIColors.yellow(f"1. Show Thoughts during generation : {'ON' if discussion.show_thoughts else 'OFF'}")
+         ASCIIColors.yellow(f"2. Include Thoughts in AI context : {'ON' if discussion.include_thoughts_in_context else 'OFF'}")
+         ASCIIColors.yellow(f"3. Thought Placeholder text : '{discussion.thought_placeholder}'")
+         print("Enter number to toggle, 3 to set text, or 'back'.")
+         choice = input("> ").lower().strip()
+         if choice == '1': discussion.show_thoughts = not discussion.show_thoughts
+         elif choice == '2': discussion.include_thoughts_in_context = not discussion.include_thoughts_in_context
+         elif choice == '3': discussion.thought_placeholder = input("Enter new placeholder text: ")
+         elif choice == 'back': break
+         else: ASCIIColors.red("Invalid choice.")
+
+ def handle_info_command(discussion: LollmsDiscussion):
+     ASCIIColors.cyan("\n--- Discussion Info ---")
+     rem_tokens = discussion.remaining_tokens
+     if rem_tokens is not None:
+         max_ctx = discussion.lollmsClient.binding.ctx_size
+         ASCIIColors.yellow(f"Context Window: {rem_tokens} / {max_ctx} tokens remaining")
+     else:
+         ASCIIColors.yellow("Context Window: Max size not available from binding.")
+     handle_config_command(discussion)

-     print("Type your message, or /back, /toggle_thoughts")
-     show_thoughts_flag = False
+ def chat_loop(discussion: LollmsDiscussion, personality: Optional[LollmsPersonality]):
+     display_branch_history(discussion)
+
+     print("\n--- Entering Chat ---")
+     p_name = f" (with '{personality.name}')" if personality else ""
+     ASCIIColors.cyan(f"Commands: /back, /tree, /switch <id>, /process, /history, /config, /info{p_name}")

      def stream_to_console(token: str, msg_type: MSG_TYPE):
-         if msg_type == MSG_TYPE.MSG_TYPE_CHUNK:
-             print(token, end="", flush=True)
-         elif msg_type == MSG_TYPE.MSG_TYPE_THOUGHT_CHUNK:
-             ASCIIColors.magenta(token, end="", flush=True)
+         if msg_type == MSG_TYPE.MSG_TYPE_CHUNK: print(token, end="", flush=True)
+         elif msg_type == MSG_TYPE.MSG_TYPE_THOUGHT_CHUNK: ASCIIColors.magenta(token, end="", flush=True)
          return True

      while True:
-         user_input = input("\nYou > ")
+         user_input = input("\nYou > ").strip()
+         if not user_input: continue
          if user_input.lower() == '/back': break
-
-         if user_input.lower() == '/toggle_thoughts':
-             show_thoughts_flag = not show_thoughts_flag
-             ASCIIColors.yellow(f"\n[{'ON' if show_thoughts_flag else 'OFF'}] Thoughts are now displayed.")
+         if user_input.lower() == '/history': display_branch_history(discussion); continue
+         if user_input.lower() == '/tree': display_message_tree(discussion); continue
+         if user_input.lower() == '/config': handle_config_command(discussion); continue
+         if user_input.lower() == '/info': handle_info_command(discussion); continue
+
+         if user_input.lower().startswith('/switch '):
+             try:
+                 msg_id_part = user_input.split(' ', 1)[1]
+                 # Find the full message ID from the partial one
+                 full_id = next((mid for mid in discussion._message_index if mid.endswith(msg_id_part)), None)
+                 if not full_id: raise ValueError(f"No message found ending with '{msg_id_part}'")
+                 discussion.switch_to_branch(full_id)
+                 ASCIIColors.green(f"Switched to branch ending at message {full_id}.")
+                 display_branch_history(discussion)
+             except IndexError: ASCIIColors.red("Usage: /switch <last_8_chars_of_id>")
+             except ValueError as e: ASCIIColors.red(f"Error: {e}")
+             continue
+
+         if user_input.lower() == '/process':
+             try:
+                 file_path_str = input("Enter path to text file: ").strip()
+                 chunk_size_str = input("Enter chunk size in characters [4096]: ").strip() or "4096"
+                 file_path = Path(file_path_str)
+                 if not file_path.exists():
+                     ASCIIColors.red(f"File not found: {file_path}"); continue
+                 large_text = file_path.read_text(encoding='utf-8')
+                 ASCIIColors.yellow(f"Read {len(large_text)} characters from file.")
+                 user_prompt = input("What should I do with this text? > ").strip()
+                 if not user_prompt:
+                     ASCIIColors.red("Prompt cannot be empty."); continue
+
+                 ASCIIColors.blue("AI is processing the document...")
+                 ai_message = discussion.process_and_summarize(large_text, user_prompt, chunk_size=int(chunk_size_str))
+                 ASCIIColors.blue(f"\nAI: {ai_message.content}")
+                 if ai_message.scratchpad:
+                     ASCIIColors.yellow(f" [AI's Scratchpad: {ai_message.scratchpad[:150]}...]")
+             except Exception as e: ASCIIColors.red(f"An error occurred during processing: {e}")
              continue

          print("AI > ", end="", flush=True)
-         discussion.chat(
-             user_input,
-             show_thoughts=show_thoughts_flag,
-             streaming_callback=stream_to_console
-         )
+         discussion.chat(user_input, personality=personality, streaming_callback=stream_to_console)
          print()

- def regenerate_response(discussion: LollmsDiscussion):
-     """Demonstrates creating a new branch by regenerating."""
+ def regenerate_response(discussion: LollmsDiscussion, personality: Optional[LollmsPersonality]):
      try:
-         ASCIIColors.yellow("\nRegenerating last AI response...")
+         ASCIIColors.yellow("\nRegenerating last AI response (new branch will be created)...")
          print("New AI > ", end="", flush=True)
          def stream_to_console(token: str, msg_type: MSG_TYPE):
-             print(token, end="", flush=True)
+             if msg_type == MSG_TYPE.MSG_TYPE_CHUNK: print(token, end="", flush=True)
+             elif msg_type == MSG_TYPE.MSG_TYPE_THOUGHT_CHUNK: ASCIIColors.magenta(token, end="", flush=True)
              return True
-         discussion.regenerate_branch(show_thoughts=True, streaming_callback=stream_to_console)
+         discussion.regenerate_branch(personality=personality, streaming_callback=stream_to_console)
          print()
-         ASCIIColors.green("New branch created.")
-     except ValueError as e:
+         ASCIIColors.green(f"New branch created. Active message is now {discussion.active_branch_id}")
+         ASCIIColors.cyan("Use '/tree' to see the branching structure.")
+     except (ValueError, AttributeError) as e:
          ASCIIColors.red(f"Could not regenerate: {e}")

  if __name__ == "__main__":
+     # --- Create dummy personalities and data for first-time run ---
+     personalities_folder = Path("./personalities")
+     personalities_folder.mkdir(exist_ok=True)
+
+     lollms_facts_file = personalities_folder / "lollms_facts.txt"
+     if not lollms_facts_file.exists():
+         lollms_facts_file.write_text(
+             "LoLLMs is a project created by ParisNeo. It stands for Lord of Large Language Models. It aims to provide a unified interface for all LLMs. The client library allows for advanced discussion and agentic features."
+         )
+
+     lollms_expert_yaml = personalities_folder / "lollms_expert.yaml"
+     if not lollms_expert_yaml.exists():
+         lollms_expert_yaml.write_text("""
+ name: LoLLMs Expert
+ author: Manual
+ category: AI Tools
+ description: An expert on the LoLLMs project.
+ system_prompt: You are an expert on the LoLLMs project. Answer questions based on the provided information. Be concise.
+ data_files:
+   - lollms_facts.txt
+ """)
+
+     parrot_yaml = personalities_folder / "parrot.yaml"
+     if not parrot_yaml.exists():
+         parrot_yaml.write_text("""
+ name: Parrot
+ author: Manual
+ category: Fun
+ description: A personality that just repeats what you say.
+ system_prompt: You are a parrot. You must start every sentence with 'Squawk!'.
+ """)
+
+     parrot_py = personalities_folder / "parrot.py"
+     if not parrot_py.exists():
+         parrot_py.write_text("""
+ def run(discussion, on_chunk_callback):
+     # This script overrides the normal chat flow.
+     user_message = discussion.get_branch(discussion.active_branch_id)[-1].content
+     response = f"Squawk! {user_message}! Squawk!"
+     if on_chunk_callback:
+         # We need to simulate the message type for the callback
+         from lollms_client import MSG_TYPE
+         on_chunk_callback(response, MSG_TYPE.MSG_TYPE_CHUNK)
+     return response # Return the full raw response
+ """)
      main()
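
A note on the mock RAG backend in the example above: the three hooks (vectorize_chunk_callback, is_vectorized_callback, query_rag_callback) are plain functions, so replacing the JSON-file mock with a real vector store only means supplying three substitutes. Below is a minimal sketch on top of ChromaDB, as the example's own comment suggests; the collection name, result count, and reliance on ChromaDB's default embedding function are illustrative assumptions, not part of lollms-client:

import chromadb

# Hypothetical drop-in replacements for the mock callbacks above.
chroma_client = chromadb.PersistentClient(path="./rag_db")
collection = chroma_client.get_or_create_collection("personality_chunks")

def chroma_vectorize_chunk(chunk_text: str, chunk_id: str):
    # upsert() rather than add() so re-indexing the same chunk id is harmless
    collection.upsert(ids=[chunk_id], documents=[chunk_text])

def chroma_is_vectorized(chunk_id: str) -> bool:
    return len(collection.get(ids=[chunk_id])["ids"]) > 0

def chroma_query_rag(user_query: str) -> str:
    # Vector similarity search instead of the mock's keyword scan
    results = collection.query(query_texts=[user_query], n_results=3)
    docs = results["documents"][0] if results["documents"] else []
    return "\n---\n".join(docs)

These would be passed to the LollmsPersonality constructor exactly where load_personalities passes the mocks.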
@@ -6,7 +6,7 @@ import shutil
  import gradio as gr
  from pathlib import Path
  # Use the correct, specified import style
- from lollms_client import LollmsClient, LollmsDiscussion, MSG_TYPE, DatabaseManager
+ from lollms_client import LollmsClient, LollmsDiscussion, MSG_TYPE, LollmsDataManager
  from ascii_colors import ASCIIColors
  from sqlalchemy import Column, String

@@ -22,7 +22,7 @@ class ResearchMessageMixin:
  # These are initialized once and used throughout the app's lifecycle.
  try:
      lc = LollmsClient("ollama", model_name="mistral-nemo:latest")
-     db_manager = DatabaseManager(
+     db_manager = LollmsDataManager(
          db_path="sqlite:///research_projects_gradio.db",
          discussion_mixin=ResearchDiscussionMixin,
          message_mixin=ResearchMessageMixin,

@@ -6,7 +6,7 @@ import shutil
  from pathlib import Path
  # Use the correct, specified import style
  from lollms_client import LollmsClient
- from lollms_client.lollms_discussion import DatabaseManager, LollmsDiscussion
+ from lollms_client.lollms_discussion import LollmsDataManager, LollmsDiscussion
  from lollms_client.lollms_types import MSG_TYPE
  from sqlalchemy import Column, String

@@ -86,8 +86,8 @@ def main():
      MIGRATION_FOLDER = Path("./old_discussions")

      try:
-         # Initialize the DatabaseManager with our schema and encryption key.
-         db_manager = DatabaseManager(
+         # Initialize the LollmsDataManager with our schema and encryption key.
+         db_manager = LollmsDataManager(
              db_path=DB_PATH,
              discussion_mixin=ResearchDiscussionMixin,
              message_mixin=ResearchMessageMixin,
lollms_client/__init__.py CHANGED
@@ -1,13 +1,14 @@
  # lollms_client/__init__.py
  from lollms_client.lollms_core import LollmsClient, ELF_COMPLETION_FORMAT
  from lollms_client.lollms_types import MSG_TYPE # Assuming ELF_GENERATION_FORMAT is not directly used by users from here
- from lollms_client.lollms_discussion import LollmsDiscussion, DatabaseManager
+ from lollms_client.lollms_discussion import LollmsDiscussion, LollmsDataManager, LollmsMessage
+ from lollms_client.lollms_personality import LollmsPersonality
  from lollms_client.lollms_utilities import PromptReshaper # Keep general utilities
  # Import new MCP binding classes
  from lollms_client.lollms_mcp_binding import LollmsMCPBinding, LollmsMCPBindingManager


- __version__ = "0.21.0" # Updated version
+ __version__ = "0.22.0" # Updated version

  # Optionally, you could define __all__ if you want to be explicit about exports
  __all__ = [
@@ -15,7 +16,9 @@ __all__ = [
      "ELF_COMPLETION_FORMAT",
      "MSG_TYPE",
      "LollmsDiscussion",
-     "DatabaseManager",
+     "LollmsMessage",
+     "LollmsPersonality",
+     "LollmsDataManager",
      "PromptReshaper",
      "LollmsMCPBinding", # Export LollmsMCPBinding ABC
      "LollmsMCPBindingManager", # Export LollmsMCPBindingManager
@@ -76,7 +76,9 @@ class LollmsClient():
          n_threads: int = 8,
          streaming_callback: Optional[Callable[[str, MSG_TYPE], None]] = None,
          user_name ="user",
-         ai_name = "assistant"):
+         ai_name = "assistant",
+         **kwargs
+         ):
          """
          Initialize the LollmsClient with LLM and optional modality bindings.
84