lollms-client 0.20.9__tar.gz → 0.21.0__tar.gz

This diff shows the changes between two publicly released versions of this package, as they appear in their public registry. It is provided for informational purposes only.

Potentially problematic release: this version of lollms-client might be problematic.

Files changed (93)
  1. {lollms_client-0.20.9/lollms_client.egg-info → lollms_client-0.21.0}/PKG-INFO +1 -1
  2. lollms_client-0.21.0/examples/console_discussion.py +207 -0
  3. lollms_client-0.21.0/examples/gradio_lollms_chat.py +259 -0
  4. lollms_client-0.21.0/examples/lollms_discussions_test.py +155 -0
  5. {lollms_client-0.20.9 → lollms_client-0.21.0}/lollms_client/__init__.py +3 -3
  6. {lollms_client-0.20.9 → lollms_client-0.21.0}/lollms_client/llm_bindings/ollama/__init__.py +1 -1
  7. {lollms_client-0.20.9 → lollms_client-0.21.0}/lollms_client/lollms_core.py +83 -1
  8. lollms_client-0.21.0/lollms_client/lollms_discussion.py +633 -0
  9. {lollms_client-0.20.9 → lollms_client-0.21.0}/lollms_client/lollms_types.py +19 -16
  10. {lollms_client-0.20.9 → lollms_client-0.21.0}/lollms_client/lollms_utilities.py +71 -57
  11. {lollms_client-0.20.9 → lollms_client-0.21.0}/lollms_client/mcp_bindings/remote_mcp/__init__.py +2 -1
  12. {lollms_client-0.20.9 → lollms_client-0.21.0/lollms_client.egg-info}/PKG-INFO +1 -1
  13. {lollms_client-0.20.9 → lollms_client-0.21.0}/lollms_client.egg-info/SOURCES.txt +3 -3
  14. lollms_client-0.20.9/examples/personality_test/chat_test.py +0 -37
  15. lollms_client-0.20.9/examples/personality_test/chat_with_aristotle.py +0 -42
  16. lollms_client-0.20.9/examples/personality_test/tesks_test.py +0 -62
  17. lollms_client-0.20.9/lollms_client/lollms_discussion.py +0 -412
  18. {lollms_client-0.20.9 → lollms_client-0.21.0}/LICENSE +0 -0
  19. {lollms_client-0.20.9 → lollms_client-0.21.0}/README.md +0 -0
  20. {lollms_client-0.20.9 → lollms_client-0.21.0}/examples/article_summary/article_summary.py +0 -0
  21. {lollms_client-0.20.9 → lollms_client-0.21.0}/examples/deep_analyze/deep_analyse.py +0 -0
  22. {lollms_client-0.20.9 → lollms_client-0.21.0}/examples/deep_analyze/deep_analyze_multiple_files.py +0 -0
  23. {lollms_client-0.20.9 → lollms_client-0.21.0}/examples/external_mcp.py +0 -0
  24. {lollms_client-0.20.9 → lollms_client-0.21.0}/examples/function_calling_with_local_custom_mcp.py +0 -0
  25. {lollms_client-0.20.9 → lollms_client-0.21.0}/examples/generate_a_benchmark_for_safe_store.py +0 -0
  26. {lollms_client-0.20.9 → lollms_client-0.21.0}/examples/generate_and_speak/generate_and_speak.py +0 -0
  27. {lollms_client-0.20.9 → lollms_client-0.21.0}/examples/generate_game_sfx/generate_game_fx.py +0 -0
  28. {lollms_client-0.20.9 → lollms_client-0.21.0}/examples/generate_text_with_multihop_rag_example.py +0 -0
  29. {lollms_client-0.20.9 → lollms_client-0.21.0}/examples/gradio_chat_app.py +0 -0
  30. {lollms_client-0.20.9 → lollms_client-0.21.0}/examples/internet_search_with_rag.py +0 -0
  31. {lollms_client-0.20.9 → lollms_client-0.21.0}/examples/local_mcp.py +0 -0
  32. {lollms_client-0.20.9 → lollms_client-0.21.0}/examples/openai_mcp.py +0 -0
  33. {lollms_client-0.20.9 → lollms_client-0.21.0}/examples/run_remote_mcp_example copy.py +0 -0
  34. {lollms_client-0.20.9 → lollms_client-0.21.0}/examples/run_standard_mcp_example.py +0 -0
  35. {lollms_client-0.20.9 → lollms_client-0.21.0}/examples/simple_text_gen_test.py +0 -0
  36. {lollms_client-0.20.9 → lollms_client-0.21.0}/examples/simple_text_gen_with_image_test.py +0 -0
  37. {lollms_client-0.20.9 → lollms_client-0.21.0}/examples/test_local_models/local_chat.py +0 -0
  38. {lollms_client-0.20.9 → lollms_client-0.21.0}/examples/text_2_audio.py +0 -0
  39. {lollms_client-0.20.9 → lollms_client-0.21.0}/examples/text_2_image.py +0 -0
  40. {lollms_client-0.20.9 → lollms_client-0.21.0}/examples/text_2_image_diffusers.py +0 -0
  41. {lollms_client-0.20.9 → lollms_client-0.21.0}/examples/text_and_image_2_audio.py +0 -0
  42. {lollms_client-0.20.9 → lollms_client-0.21.0}/examples/text_gen.py +0 -0
  43. {lollms_client-0.20.9 → lollms_client-0.21.0}/examples/text_gen_system_prompt.py +0 -0
  44. {lollms_client-0.20.9 → lollms_client-0.21.0}/lollms_client/llm_bindings/__init__.py +0 -0
  45. {lollms_client-0.20.9 → lollms_client-0.21.0}/lollms_client/llm_bindings/llamacpp/__init__.py +0 -0
  46. {lollms_client-0.20.9 → lollms_client-0.21.0}/lollms_client/llm_bindings/lollms/__init__.py +0 -0
  47. {lollms_client-0.20.9 → lollms_client-0.21.0}/lollms_client/llm_bindings/openai/__init__.py +0 -0
  48. {lollms_client-0.20.9 → lollms_client-0.21.0}/lollms_client/llm_bindings/openllm/__init__.py +0 -0
  49. {lollms_client-0.20.9 → lollms_client-0.21.0}/lollms_client/llm_bindings/pythonllamacpp/__init__.py +0 -0
  50. {lollms_client-0.20.9 → lollms_client-0.21.0}/lollms_client/llm_bindings/tensor_rt/__init__.py +0 -0
  51. {lollms_client-0.20.9 → lollms_client-0.21.0}/lollms_client/llm_bindings/transformers/__init__.py +0 -0
  52. {lollms_client-0.20.9 → lollms_client-0.21.0}/lollms_client/llm_bindings/vllm/__init__.py +0 -0
  53. {lollms_client-0.20.9 → lollms_client-0.21.0}/lollms_client/lollms_config.py +0 -0
  54. {lollms_client-0.20.9 → lollms_client-0.21.0}/lollms_client/lollms_js_analyzer.py +0 -0
  55. {lollms_client-0.20.9 → lollms_client-0.21.0}/lollms_client/lollms_llm_binding.py +0 -0
  56. {lollms_client-0.20.9 → lollms_client-0.21.0}/lollms_client/lollms_mcp_binding.py +0 -0
  57. {lollms_client-0.20.9 → lollms_client-0.21.0}/lollms_client/lollms_python_analyzer.py +0 -0
  58. {lollms_client-0.20.9 → lollms_client-0.21.0}/lollms_client/lollms_stt_binding.py +0 -0
  59. {lollms_client-0.20.9 → lollms_client-0.21.0}/lollms_client/lollms_tti_binding.py +0 -0
  60. {lollms_client-0.20.9 → lollms_client-0.21.0}/lollms_client/lollms_ttm_binding.py +0 -0
  61. {lollms_client-0.20.9 → lollms_client-0.21.0}/lollms_client/lollms_tts_binding.py +0 -0
  62. {lollms_client-0.20.9 → lollms_client-0.21.0}/lollms_client/lollms_ttv_binding.py +0 -0
  63. {lollms_client-0.20.9 → lollms_client-0.21.0}/lollms_client/mcp_bindings/local_mcp/__init__.py +0 -0
  64. {lollms_client-0.20.9 → lollms_client-0.21.0}/lollms_client/mcp_bindings/local_mcp/default_tools/file_writer/file_writer.py +0 -0
  65. {lollms_client-0.20.9 → lollms_client-0.21.0}/lollms_client/mcp_bindings/local_mcp/default_tools/generate_image_from_prompt/generate_image_from_prompt.py +0 -0
  66. {lollms_client-0.20.9 → lollms_client-0.21.0}/lollms_client/mcp_bindings/local_mcp/default_tools/internet_search/internet_search.py +0 -0
  67. {lollms_client-0.20.9 → lollms_client-0.21.0}/lollms_client/mcp_bindings/local_mcp/default_tools/python_interpreter/python_interpreter.py +0 -0
  68. {lollms_client-0.20.9 → lollms_client-0.21.0}/lollms_client/mcp_bindings/standard_mcp/__init__.py +0 -0
  69. {lollms_client-0.20.9 → lollms_client-0.21.0}/lollms_client/stt_bindings/__init__.py +0 -0
  70. {lollms_client-0.20.9 → lollms_client-0.21.0}/lollms_client/stt_bindings/lollms/__init__.py +0 -0
  71. {lollms_client-0.20.9 → lollms_client-0.21.0}/lollms_client/stt_bindings/whisper/__init__.py +0 -0
  72. {lollms_client-0.20.9 → lollms_client-0.21.0}/lollms_client/stt_bindings/whispercpp/__init__.py +0 -0
  73. {lollms_client-0.20.9 → lollms_client-0.21.0}/lollms_client/tti_bindings/__init__.py +0 -0
  74. {lollms_client-0.20.9 → lollms_client-0.21.0}/lollms_client/tti_bindings/dalle/__init__.py +0 -0
  75. {lollms_client-0.20.9 → lollms_client-0.21.0}/lollms_client/tti_bindings/diffusers/__init__.py +0 -0
  76. {lollms_client-0.20.9 → lollms_client-0.21.0}/lollms_client/tti_bindings/gemini/__init__.py +0 -0
  77. {lollms_client-0.20.9 → lollms_client-0.21.0}/lollms_client/tti_bindings/lollms/__init__.py +0 -0
  78. {lollms_client-0.20.9 → lollms_client-0.21.0}/lollms_client/ttm_bindings/__init__.py +0 -0
  79. {lollms_client-0.20.9 → lollms_client-0.21.0}/lollms_client/ttm_bindings/audiocraft/__init__.py +0 -0
  80. {lollms_client-0.20.9 → lollms_client-0.21.0}/lollms_client/ttm_bindings/bark/__init__.py +0 -0
  81. {lollms_client-0.20.9 → lollms_client-0.21.0}/lollms_client/ttm_bindings/lollms/__init__.py +0 -0
  82. {lollms_client-0.20.9 → lollms_client-0.21.0}/lollms_client/tts_bindings/__init__.py +0 -0
  83. {lollms_client-0.20.9 → lollms_client-0.21.0}/lollms_client/tts_bindings/bark/__init__.py +0 -0
  84. {lollms_client-0.20.9 → lollms_client-0.21.0}/lollms_client/tts_bindings/lollms/__init__.py +0 -0
  85. {lollms_client-0.20.9 → lollms_client-0.21.0}/lollms_client/tts_bindings/piper_tts/__init__.py +0 -0
  86. {lollms_client-0.20.9 → lollms_client-0.21.0}/lollms_client/tts_bindings/xtts/__init__.py +0 -0
  87. {lollms_client-0.20.9 → lollms_client-0.21.0}/lollms_client/ttv_bindings/__init__.py +0 -0
  88. {lollms_client-0.20.9 → lollms_client-0.21.0}/lollms_client/ttv_bindings/lollms/__init__.py +0 -0
  89. {lollms_client-0.20.9 → lollms_client-0.21.0}/lollms_client.egg-info/dependency_links.txt +0 -0
  90. {lollms_client-0.20.9 → lollms_client-0.21.0}/lollms_client.egg-info/requires.txt +0 -0
  91. {lollms_client-0.20.9 → lollms_client-0.21.0}/lollms_client.egg-info/top_level.txt +0 -0
  92. {lollms_client-0.20.9 → lollms_client-0.21.0}/pyproject.toml +0 -0
  93. {lollms_client-0.20.9 → lollms_client-0.21.0}/setup.cfg +0 -0
--- lollms_client-0.20.9/lollms_client.egg-info/PKG-INFO
+++ lollms_client-0.21.0/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: lollms_client
-Version: 0.20.9
+Version: 0.21.0
 Summary: A client library for LoLLMs generate endpoint
 Author-email: ParisNeo <parisneoai@gmail.com>
 License: Apache Software License
--- /dev/null
+++ lollms_client-0.21.0/examples/console_discussion.py
@@ -0,0 +1,207 @@
+# research_app_final.py
+
+import os
+import json
+import shutil
+from pathlib import Path
+# Use the correct, specified import style
+from lollms_client import LollmsClient, LollmsDiscussion, MSG_TYPE, DatabaseManager
+from ascii_colors import ASCIIColors
+from sqlalchemy import Column, String
+
+# --- 1. Define Application-Specific Schema ---
+# We define our custom fields for the database tables.
+# This allows applications to store and query their own metadata.
+class ResearchDiscussionMixin:
+    # We want each discussion to have a 'project_name' that we can search for.
+    project_name = Column(String(100), index=True, nullable=False)
+
+class ResearchMessageMixin:
+    # This mixin is empty for this example.
+    pass
+
+def main():
+    # --- 2. Setup: Lollms Client is always needed ---
+    print("--- LOLLMS Research Assistant (Final Version) ---")
+    try:
+        # Instantiate the real LollmsClient to connect to a running model service.
+        # Ensure Ollama is running and has pulled the specified model.
+        lc = LollmsClient("ollama", model_name="qwen3:4b")
+        print("LollmsClient connected successfully to Ollama.")
+    except Exception as e:
+        print(f"\nFATAL: Could not connect to LLM binding. Is Ollama running?\nError: {e}")
+        return
+
+    # --- 3. Setup Database Manager ---
+    DB_PATH = "sqlite:///research_projects_final.db"
+    ENCRYPTION_KEY = "a-secure-password-for-the-database"
+
+    try:
+        db_manager = DatabaseManager(
+            db_path=DB_PATH,
+            discussion_mixin=ResearchDiscussionMixin,
+            message_mixin=ResearchMessageMixin,
+            encryption_key=ENCRYPTION_KEY
+        )
+        print(f"Database setup complete. Encryption is ENABLED.")
+    except Exception as e:
+        print(f"\nFATAL: Could not initialize database. Error: {e}")
+        return
+
+    # --- 4. Main Application Loop ---
+    # This loop demonstrates the new management features.
+    discussion = None
+    while True:
+        print("\n--- Main Menu ---")
+        if discussion:
+            print(f"Current Project: '{discussion.metadata.get('project_name', discussion.id)}'")
+            print("c. Chat in current project")
+            print("r. Regenerate last AI response (create new branch)")
+        print("l. List all projects")
+        print("s. Search for a project")
+        print("n. Start a new project")
+        print("o. Open an existing project")
+        print("d. Delete a project")
+        print("e. Exit")
+
+        choice = input("> ").lower()
+
+        if choice == 'c' and discussion:
+            chat_loop(discussion)
+        elif choice == 'r' and discussion:
+            regenerate_response(discussion)
+        elif choice == 'l':
+            list_all_projects(db_manager)
+        elif choice == 's':
+            search_for_project(db_manager)
+        elif choice == 'n':
+            discussion = start_new_project(lc, db_manager)
+        elif choice == 'o':
+            discussion = open_project(lc, db_manager)
+        elif choice == 'd':
+            delete_project(db_manager)
+            if discussion and discussion.id not in [d['id'] for d in db_manager.list_discussions()]:
+                discussion = None # Clear current discussion if it was deleted
+        elif choice == 'e':
+            break
+        else:
+            ASCIIColors.red("Invalid choice.")
+
+    # --- Cleanup ---
+    print("\n--- Demo complete. Cleaning up. ---")
+    if os.path.exists(DB_PATH):
+        os.remove(DB_PATH)
+
+def list_all_projects(db_manager: DatabaseManager):
+    projects = db_manager.list_discussions()
+    if not projects:
+        ASCIIColors.yellow("No projects found.")
+        return
+    print("\n--- All Projects ---")
+    for p in projects:
+        print(f"- ID: {p['id']} | Name: {p.get('project_name', 'N/A')}")
+
+def search_for_project(db_manager: DatabaseManager):
+    term = input("Enter search term for project name: ")
+    projects = db_manager.search_discussions(project_name=term)
+    if not projects:
+        ASCIIColors.yellow(f"No projects found matching '{term}'.")
+        return
+    print(f"\n--- Search Results for '{term}' ---")
+    for p in projects:
+        print(f"- ID: {p['id']} | Name: {p.get('project_name', 'N/A')}")
+
+def start_new_project(lc: LollmsClient, db_manager: DatabaseManager) -> LollmsDiscussion:
+    name = input("Enter new project name: ")
+    if not name:
+        ASCIIColors.red("Project name cannot be empty.")
+        return None
+    discussion = LollmsDiscussion.create_new(
+        lollms_client=lc,
+        db_manager=db_manager,
+        autosave=True, # Recommended for interactive apps
+        project_name=name
+    )
+    discussion.system_prompt = f"This is a research project about {name}."
+    ASCIIColors.green(f"Project '{name}' created successfully.")
+    return discussion
+
+def open_project(lc: LollmsClient, db_manager: DatabaseManager) -> LollmsDiscussion:
+    list_all_projects(db_manager)
+    disc_id = input("Enter project ID to open: ")
+    discussion = db_manager.get_discussion(lollms_client=lc, discussion_id=disc_id, autosave=True)
+    if not discussion:
+        ASCIIColors.red("Project not found.")
+        return None
+    ASCIIColors.green(f"Opened project '{discussion.metadata.get('project_name', discussion.id)}'.")
+    return discussion
+
+def delete_project(db_manager: DatabaseManager):
+    list_all_projects(db_manager)
+    disc_id = input("Enter project ID to DELETE: ")
+    confirm = input(f"Are you sure you want to permanently delete project {disc_id}? (y/N): ")
+    if confirm.lower() == 'y':
+        db_manager.delete_discussion(disc_id)
+        ASCIIColors.green("Project deleted.")
+    else:
+        ASCIIColors.yellow("Deletion cancelled.")
+
+def chat_loop(discussion: LollmsDiscussion):
+    """The interactive chat session for a given discussion."""
+    print("\n--- Entering Chat ---")
+
+    # Display the current branch history when entering the chat.
+    current_branch = discussion.get_branch(discussion.active_branch_id)
+    if current_branch:
+        ASCIIColors.cyan("--- Current Conversation History ---")
+        for msg in current_branch:
+            sender = msg['sender']
+            if sender == 'user':
+                ASCIIColors.green(f"You: {msg['content']}")
+            else:
+                ASCIIColors.blue(f"AI: {msg['content']}")
+        ASCIIColors.cyan("----------------------------------")
+
+    print("Type your message, or /back, /toggle_thoughts")
+    show_thoughts_flag = False
+
+    def stream_to_console(token: str, msg_type: MSG_TYPE):
+        if msg_type == MSG_TYPE.MSG_TYPE_CHUNK:
+            print(token, end="", flush=True)
+        elif msg_type == MSG_TYPE.MSG_TYPE_THOUGHT_CHUNK:
+            ASCIIColors.magenta(token, end="", flush=True)
+        return True
+
+    while True:
+        user_input = input("\nYou > ")
+        if user_input.lower() == '/back': break
+
+        if user_input.lower() == '/toggle_thoughts':
+            show_thoughts_flag = not show_thoughts_flag
+            ASCIIColors.yellow(f"\n[{'ON' if show_thoughts_flag else 'OFF'}] Thoughts are now displayed.")
+            continue
+
+        print("AI > ", end="", flush=True)
+        discussion.chat(
+            user_input,
+            show_thoughts=show_thoughts_flag,
+            streaming_callback=stream_to_console
+        )
+        print()
+
+def regenerate_response(discussion: LollmsDiscussion):
+    """Demonstrates creating a new branch by regenerating."""
+    try:
+        ASCIIColors.yellow("\nRegenerating last AI response...")
+        print("New AI > ", end="", flush=True)
+        def stream_to_console(token: str, msg_type: MSG_TYPE):
+            print(token, end="", flush=True)
+            return True
+        discussion.regenerate_branch(show_thoughts=True, streaming_callback=stream_to_console)
+        print()
+        ASCIIColors.green("New branch created.")
+    except ValueError as e:
+        ASCIIColors.red(f"Could not regenerate: {e}")
+
+if __name__ == "__main__":
+    main()
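Condensed, the flow this example exercises is: build a client, build a DatabaseManager with schema mixins, create a discussion bound to it, then chat with a streaming callback. A minimal sketch using only names that appear in the file above (the model name, database path, and encryption key are placeholders):

    from lollms_client import LollmsClient, LollmsDiscussion, DatabaseManager
    from sqlalchemy import Column, String

    class ProjectMixin:
        # Extra column that DatabaseManager merges into the discussions table.
        project_name = Column(String(100), index=True, nullable=False)

    class EmptyMixin:
        pass

    lc = LollmsClient("ollama", model_name="qwen3:4b")   # any running binding/model
    db = DatabaseManager(db_path="sqlite:///demo.db",
                         discussion_mixin=ProjectMixin,
                         message_mixin=EmptyMixin,
                         encryption_key="change-me")
    discussion = LollmsDiscussion.create_new(lollms_client=lc, db_manager=db,
                                             autosave=True, project_name="Demo")

    def on_token(token, msg_type):
        print(token, end="", flush=True)
        return True  # returning True keeps the stream going, as in the example

    discussion.chat("Hello!", streaming_callback=on_token)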
--- /dev/null
+++ lollms_client-0.21.0/examples/gradio_lollms_chat.py
@@ -0,0 +1,259 @@
+# research_app_final.py
+
+import os
+import json
+import shutil
+import gradio as gr
+from pathlib import Path
+# Use the correct, specified import style
+from lollms_client import LollmsClient, LollmsDiscussion, MSG_TYPE, DatabaseManager
+from ascii_colors import ASCIIColors
+from sqlalchemy import Column, String
+
+# --- 1. Define Application-Specific Schema ---
+# This allows applications to store and query their own metadata in the database.
+class ResearchDiscussionMixin:
+    project_name = Column(String(100), index=True, nullable=False)
+
+class ResearchMessageMixin:
+    pass
+
+# --- 2. Global Setup: Client and Database ---
+# These are initialized once and used throughout the app's lifecycle.
+try:
+    lc = LollmsClient("ollama", model_name="mistral-nemo:latest")
+    db_manager = DatabaseManager(
+        db_path="sqlite:///research_projects_gradio.db",
+        discussion_mixin=ResearchDiscussionMixin,
+        message_mixin=ResearchMessageMixin,
+        encryption_key="a-super-secret-key-for-the-gradio-app"
+    )
+    print("✅ Client and Database initialized successfully.")
+except Exception as e:
+    print(f"❌ FATAL: Could not initialize services. Is Ollama running? Error: {e}")
+    lc = None
+    db_manager = None
+
+# --- 3. UI Helper Functions ---
+# These functions connect the Gradio UI to our discussion library's backend logic.
+
+def _get_discussion_list():
+    """Helper to fetch and format the list of discussions for the dropdown."""
+    if not db_manager: return []
+    discussions = db_manager.list_discussions()
+    return [(d.get('project_name', d['id']), d['id']) for d in discussions]
+
+def _format_chatbot_history(discussion: LollmsDiscussion):
+    """Converts a discussion's active branch into Gradio's chatbot format."""
+    history = []
+    if not discussion: return history
+
+    branch = discussion.get_branch(discussion.active_branch_id)
+    # This robust loop correctly pairs user and AI messages.
+    i = 0
+    while i < len(branch):
+        if branch[i]['sender_type'] == 'user':
+            user_msg = branch[i]['content']
+            if i + 1 < len(branch) and branch[i+1]['sender_type'] == 'assistant':
+                ai_msg = branch[i+1]['content']
+                history.append((user_msg, ai_msg))
+                i += 2
+            else:
+                history.append((user_msg, None))
+                i += 1
+        else:
+            ai_msg = branch[i]['content']
+            history.append((None, ai_msg))
+            i += 1
+    return history
+
+# --- 4. Gradio UI Event Handler Functions ---
+
+def handle_new_discussion(name: str):
+    """Called when the 'New Project' button is clicked."""
+    if not name.strip():
+        gr.Warning("Project name cannot be empty.")
+        return gr.Dropdown(choices=_get_discussion_list()), None, []
+
+    discussion = LollmsDiscussion.create_new(
+        lollms_client=lc,
+        db_manager=db_manager,
+        project_name=name.strip(),
+        autosave=True
+    )
+    discussion.set_system_prompt(f"This is a research project about {name.strip()}. Be helpful and concise, but use <think> tags to outline your process before answering.")
+    discussion.set_participants({"user":"user", "assistant":"assistant"})
+
+    gr.Info(f"Project '{name.strip()}' created!")
+
+    return gr.Dropdown(choices=_get_discussion_list(), value=discussion.id), discussion.id, []
+
+def handle_load_discussion(discussion_id: str):
+    """Called when a discussion is selected from the dropdown."""
+    if not discussion_id:
+        return None, []
+
+    discussion = db_manager.get_discussion(lollms_client=lc, discussion_id=discussion_id)
+    chatbot_history = _format_chatbot_history(discussion)
+
+    return chatbot_history
+
+def handle_delete_discussion(discussion_id: str):
+    """Called when the 'Delete' button is clicked."""
+    if not discussion_id:
+        gr.Warning("No project selected to delete.")
+        return gr.Dropdown(choices=_get_discussion_list()), None, []
+
+    db_manager.delete_discussion(discussion_id)
+    gr.Info("Project deleted.")
+
+    return gr.Dropdown(choices=_get_discussion_list(), value=None), None, []
+
+def handle_chat_submit(user_input: str, chatbot_history: list, discussion_id: str, show_thoughts: bool):
+    """The main chat handler, called on message submit. Uses a generator for streaming."""
+    if not discussion_id:
+        gr.Warning("Please select or create a project first.")
+        return "", chatbot_history
+    if not user_input.strip():
+        return "", chatbot_history
+
+    discussion = db_manager.get_discussion(lollms_client=lc, discussion_id=discussion_id, autosave=True)
+
+    chatbot_history.append((user_input, None))
+    yield "", chatbot_history
+
+    ai_message_buffer = ""
+
+    def stream_to_chatbot(token: str, msg_type: MSG_TYPE):
+        nonlocal ai_message_buffer
+        if msg_type == MSG_TYPE.MSG_TYPE_CHUNK:
+            ai_message_buffer += token
+            chatbot_history[-1] = (user_input, ai_message_buffer)
+        elif msg_type == MSG_TYPE.MSG_TYPE_THOUGHT_CHUNK:
+            thought_html = f"<p style='color:magenta;'><i>{token}</i></p>"
+            chatbot_history[-1] = (user_input, ai_message_buffer + thought_html)
+        return True
+
+    discussion.chat(
+        user_message=user_input,
+        show_thoughts=show_thoughts,
+        streaming_callback=stream_to_chatbot
+    )
+
+    yield "", chatbot_history
+
+def handle_regenerate(chatbot_history: list, discussion_id: str, show_thoughts: bool):
+    """Called to regenerate the last AI response."""
+    if not discussion_id:
+        gr.Warning("Please select a project first.")
+        return chatbot_history
+    if not chatbot_history or chatbot_history[-1][1] is None:
+        gr.Warning("Nothing to regenerate.")
+        return chatbot_history
+
+    discussion = db_manager.get_discussion(lollms_client=lc, discussion_id=discussion_id, autosave=True)
+
+    chatbot_history.pop()
+    user_input_for_ui = chatbot_history[-1][0] if chatbot_history else ""
+    chatbot_history.append((user_input_for_ui, None))
+    yield chatbot_history
+
+    ai_message_buffer = ""
+
+    def stream_to_chatbot(token: str, msg_type: MSG_TYPE):
+        nonlocal ai_message_buffer
+        if msg_type == MSG_TYPE.MSG_TYPE_CHUNK:
+            ai_message_buffer += token
+            chatbot_history[-1] = (user_input_for_ui, ai_message_buffer)
+        elif msg_type == MSG_TYPE.MSG_TYPE_THOUGHT_CHUNK:
+            thought_html = f"<p style='color:magenta;'><i>{token}</i></p>"
+            chatbot_history[-1] = (user_input_for_ui, ai_message_buffer + thought_html)
+        return True
+
+    discussion.regenerate_branch(
+        show_thoughts=show_thoughts,
+        streaming_callback=stream_to_chatbot
+    )
+
+    yield chatbot_history
+
+# --- 5. Build and Launch the Gradio App ---
+with gr.Blocks(theme=gr.themes.Soft(), title="Lollms Discussion Manager") as demo:
+    discussion_id_state = gr.State(None)
+
+    gr.Markdown("# Lollms Discussion Manager")
+
+    with gr.Row():
+        with gr.Column(scale=1, min_width=300):
+            gr.Markdown("## Projects")
+            discussion_dd = gr.Dropdown(
+                choices=_get_discussion_list(),
+                label="Select Project",
+                interactive=True
+            )
+            with gr.Accordion("Manage Projects", open=False):
+                new_discussion_name = gr.Textbox(label="New Project Name", placeholder="Enter name and press button")
+                with gr.Row():
+                    new_discussion_btn = gr.Button("➕ New")
+                    delete_discussion_btn = gr.Button("❌ Delete")
+
+            gr.Markdown("---")
+            gr.Markdown("## Options")
+            show_thoughts_check = gr.Checkbox(label="Show AI Thoughts", value=False)
+
+        with gr.Column(scale=3):
+            chatbot = gr.Chatbot(label="Conversation", height=600, bubble_full_width=False, render_markdown=True)
+            with gr.Row():
+                user_input_tb = gr.Textbox(
+                    label="Your Message",
+                    placeholder="Type your message here...",
+                    scale=5,
+                    autofocus=True
+                )
+                send_btn = gr.Button("✉️ Send", variant="primary", scale=1)
+                regenerate_btn = gr.Button("🔄 Regenerate", scale=1)
+
+    # --- Event Handling: Wiring the UI to the backend functions ---
+
+    new_discussion_btn.click(
+        fn=handle_new_discussion,
+        inputs=[new_discussion_name],
+        outputs=[discussion_dd, discussion_id_state, chatbot]
+    ).then(fn=lambda: "", inputs=None, outputs=[new_discussion_name])
+
+    delete_discussion_btn.click(
+        fn=handle_delete_discussion,
+        inputs=[discussion_id_state],
+        outputs=[discussion_dd, discussion_id_state, chatbot]
+    )
+
+    discussion_dd.change(
+        fn=handle_load_discussion,
+        inputs=[discussion_dd],
+        outputs=[chatbot]
+    ).then(lambda x: x, inputs=[discussion_dd], outputs=[discussion_id_state])
+
+    user_input_tb.submit(
+        fn=handle_chat_submit,
+        inputs=[user_input_tb, chatbot, discussion_id_state, show_thoughts_check],
+        outputs=[user_input_tb, chatbot]
+    )
+    send_btn.click(
+        fn=handle_chat_submit,
+        inputs=[user_input_tb, chatbot, discussion_id_state, show_thoughts_check],
+        outputs=[user_input_tb, chatbot]
+    )
+    regenerate_btn.click(
+        fn=handle_regenerate,
+        inputs=[chatbot, discussion_id_state, show_thoughts_check],
+        outputs=[chatbot]
+    )
+
+if __name__ == "__main__":
+    if lc is None or db_manager is None:
+        print("Could not start Gradio app due to initialization failure.")
+    else:
+        demo.launch()
+        print("\n--- App closed. Cleaning up. ---")
+        if os.path.exists("research_projects_gradio.db"):
+            os.remove("research_projects_gradio.db")
1
+ # research_app_final.py
2
+
3
+ import os
4
+ import json
5
+ import shutil
6
+ from pathlib import Path
7
+ # Use the correct, specified import style
8
+ from lollms_client import LollmsClient
9
+ from lollms_client.lollms_discussion import DatabaseManager, LollmsDiscussion
10
+ from lollms_client.lollms_types import MSG_TYPE
11
+ from sqlalchemy import Column, String
12
+
13
+ # --- 1. Define Application-Specific Schema ---
14
+ # The developer can define their own fields for the database tables.
15
+ # This allows applications to store and query their own metadata.
16
+ class ResearchDiscussionMixin:
17
+ # We want each discussion to have a 'project_name' that we can search for.
18
+ project_name = Column(String(100), index=True, nullable=False)
19
+
20
+ class ResearchMessageMixin:
21
+ # This mixin is empty for this example.
22
+ pass
23
+
24
+ def setup_migration_dummies(folder: Path):
25
+ """Creates a dummy JSON file to simulate an old, file-based project structure."""
26
+ if not folder.exists():
27
+ folder.mkdir(parents=True, exist_ok=True)
28
+
29
+ # This data structure mimics what the old `to_dict` would have produced.
30
+ discussion_data = {
31
+ "id": "old_project_alpha",
32
+ "metadata": {"project_name": "Project Alpha"},
33
+ "system_prompt": "This is the system prompt for Alpha.",
34
+ "created_at": "2023-01-01T12:00:00",
35
+ "updated_at": "2023-01-01T12:05:00",
36
+ "messages": [
37
+ {"id": "msg1", "sender": "user", "sender_type":"user", "content": "What was the first finding?", "created_at": "2023-01-01T12:00:00"},
38
+ {"id": "msg2", "sender": "assistant", "sender_type":"assistant", "content": "It was about quantum states.", "parent_id": "msg1", "created_at": "2023-01-01T12:05:00"}
39
+ ]
40
+ }
41
+ with open(folder / "project_alpha.json", "w") as f:
42
+ json.dump(discussion_data, f, indent=2)
43
+ print(f"Created dummy migration file in '{folder}'.")
44
+
45
+ def main():
46
+ # --- 2. Setup: Lollms Client is always needed ---
47
+ print("--- LOLLMS Research Assistant (Final Version) ---")
48
+ try:
49
+ # Instantiate the real LollmsClient to connect to a running model service.
50
+ # Ensure Ollama is running and has pulled the specified model.
51
+ lc = LollmsClient("ollama", model_name="mistral-nemo:latest")
52
+ print("LollmsClient connected successfully to Ollama.")
53
+ except Exception as e:
54
+ print(f"\nFATAL: Could not connect to LLM binding. Is Ollama running?\nError: {e}")
55
+ return
56
+
57
+ # --- DEMO 1: In-Memory Mode (Backward Compatibility) ---
58
+ print("\n--- DEMO 1: In-Memory Discussion ---")
59
+
60
+ # Create an in-memory discussion by NOT passing a db_manager.
61
+ in_memory_discussion = LollmsDiscussion.create_new(lollms_client=lc)
62
+ in_memory_discussion.system_prompt = "You are a helpful assistant."
63
+ print("Created an in-memory discussion.")
64
+
65
+ # Interact with it. The state is held entirely in the object.
66
+ user_input_mem = "Can you remember that my favorite color is blue?"
67
+ print(f"You > {user_input_mem}")
68
+ print("AI > ", end="", flush=True)
69
+ def stream_to_console(token, msg_type=MSG_TYPE.MSG_TYPE_CHUNK):
70
+ print(token, end="", flush=True)
71
+ return True
72
+ in_memory_discussion.chat(user_input_mem, streaming_callback=stream_to_console)
73
+ print()
74
+
75
+ # Save its state to a JSON file. This now works correctly.
76
+ file_path = Path("./in_memory_save.json")
77
+ with open(file_path, "w") as f:
78
+ json.dump(in_memory_discussion.to_dict(), f, indent=2)
79
+ print(f"\nIn-memory discussion saved to '{file_path}'.")
80
+ os.remove(file_path)
81
+
82
+ # --- DEMO 2: Database-Backed Mode with Migration ---
83
+ print("\n--- DEMO 2: Database-Backed Mode ---")
84
+ DB_PATH = "sqlite:///research_projects_final.db"
85
+ ENCRYPTION_KEY = "a-secure-password-for-the-database"
86
+ MIGRATION_FOLDER = Path("./old_discussions")
87
+
88
+ try:
89
+ # Initialize the DatabaseManager with our schema and encryption key.
90
+ db_manager = DatabaseManager(
91
+ db_path=DB_PATH,
92
+ discussion_mixin=ResearchDiscussionMixin,
93
+ message_mixin=ResearchMessageMixin,
94
+ encryption_key=ENCRYPTION_KEY
95
+ )
96
+ print(f"Database setup complete. Encryption is ENABLED.")
97
+ except Exception as e:
98
+ print(f"\nFATAL: Could not initialize database. Error: {e}")
99
+ return
100
+
101
+ # Demonstrate the one-time migration from a folder of JSON files.
102
+ setup_migration_dummies(MIGRATION_FOLDER)
103
+ input("\nDummy migration files created. Press Enter to run the migration...")
104
+ LollmsDiscussion.migrate(lollms_client=lc, db_manager=db_manager, folder_path=MIGRATION_FOLDER)
105
+
106
+ session = db_manager.get_session()
107
+ migrated_count = session.query(db_manager.DiscussionModel).count()
108
+ print(f"Verification: Found {migrated_count} discussions in the database after migration.")
109
+ session.close()
110
+
111
+ # --- DEMO 3: Live Chat with a DB-Backed Discussion ---
112
+ input("\nMigration complete. Press Enter to start a new, database-backed chat session...")
113
+
114
+ # Create a new, database-backed discussion with our custom 'project_name'.
115
+ discussion = LollmsDiscussion.create_new(
116
+ lollms_client=lc,
117
+ db_manager=db_manager,
118
+ max_context_size=lc.default_ctx_size // 2,
119
+ autosave=True,
120
+ project_name="Project Gamma (Live)"
121
+ )
122
+ discussion.system_prompt = "You are a helpful assistant for Project Gamma."
123
+
124
+ print(f"\n--- Live Chat for '{discussion.db_discussion.project_name}' ---")
125
+ print("Type your message, or '/exit', '/export_openai', '/export_ollama' to quit.")
126
+
127
+ while True:
128
+ user_input = input("\nYou > ")
129
+ if user_input.lower() == '/exit': break
130
+
131
+ if user_input.lower().startswith('/export'):
132
+ try:
133
+ format_type = user_input.split('_')[1] + "_chat"
134
+ exported_data = discussion.export(format_type)
135
+ print(f"\n--- Exported for {format_type.split('_')[0].upper()} ---")
136
+ print(json.dumps(exported_data, indent=2))
137
+ print("-----------------------------------")
138
+ except IndexError:
139
+ print("Invalid export command. Use /export_openai or /export_ollama")
140
+ continue
141
+
142
+ print("AI > ", end="", flush=True)
143
+ # The same streaming callback works seamlessly.
144
+ discussion.chat(user_input, streaming_callback=stream_to_console)
145
+ print()
146
+
147
+ # --- Cleanup ---
148
+ print("\n--- Demo complete. Cleaning up. ---")
149
+ if os.path.exists(DB_PATH):
150
+ os.remove(DB_PATH)
151
+ if MIGRATION_FOLDER.exists():
152
+ shutil.rmtree(MIGRATION_FOLDER)
153
+
154
+ if __name__ == "__main__":
155
+ main()
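The export commands in DEMO 3 reduce to a small string transformation: `/export_openai` becomes `discussion.export("openai_chat")` and `/export_ollama` becomes `discussion.export("ollama_chat")`. The parsing step, isolated so it can be checked directly (only the two format strings used above are assumed to be valid inputs to export):

    def export_format_from_command(command: str) -> str:
        # "/export_openai" -> "openai_chat"; "/export_ollama" -> "ollama_chat"
        return command.split('_')[1] + "_chat"

    assert export_format_from_command("/export_openai") == "openai_chat"
    assert export_format_from_command("/export_ollama") == "ollama_chat"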
--- lollms_client-0.20.9/lollms_client/__init__.py
+++ lollms_client-0.21.0/lollms_client/__init__.py
@@ -1,13 +1,13 @@
 # lollms_client/__init__.py
 from lollms_client.lollms_core import LollmsClient, ELF_COMPLETION_FORMAT
 from lollms_client.lollms_types import MSG_TYPE # Assuming ELF_GENERATION_FORMAT is not directly used by users from here
-from lollms_client.lollms_discussion import LollmsDiscussion, LollmsMessage
+from lollms_client.lollms_discussion import LollmsDiscussion, DatabaseManager
 from lollms_client.lollms_utilities import PromptReshaper # Keep general utilities
 # Import new MCP binding classes
 from lollms_client.lollms_mcp_binding import LollmsMCPBinding, LollmsMCPBindingManager
 
 
-__version__ = "0.20.9" # Updated version
+__version__ = "0.21.0" # Updated version
 
 # Optionally, you could define __all__ if you want to be explicit about exports
 __all__ = [
@@ -15,7 +15,7 @@ __all__ = [
     "ELF_COMPLETION_FORMAT",
     "MSG_TYPE",
     "LollmsDiscussion",
-    "LollmsMessage",
+    "DatabaseManager",
     "PromptReshaper",
     "LollmsMCPBinding", # Export LollmsMCPBinding ABC
     "LollmsMCPBindingManager", # Export LollmsMCPBindingManager
--- lollms_client-0.20.9/lollms_client/llm_bindings/ollama/__init__.py
+++ lollms_client-0.21.0/lollms_client/llm_bindings/ollama/__init__.py
@@ -449,7 +449,7 @@ class OllamaBinding(LollmsLLMBinding):
         """
         return {
             "name": self.binding_name, # from super class
-            "version": ollama.__version__ if ollama else "unknown", # Ollama library version
+            "version": pm.get_installed_version("ollama") if ollama else "unknown", # Ollama library version
             "host_address": self.host_address,
             "model_name": self.model_name,
             "supports_structured_output": False, # Ollama primarily supports text/chat