lollms-client 0.32.1__py3-none-any.whl → 1.0.0__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.

Potentially problematic release: this version of lollms-client has been flagged as possibly problematic.

Files changed (73)
  1. lollms_client/__init__.py +1 -1
  2. lollms_client/llm_bindings/azure_openai/__init__.py +6 -10
  3. lollms_client/llm_bindings/claude/__init__.py +4 -7
  4. lollms_client/llm_bindings/gemini/__init__.py +3 -7
  5. lollms_client/llm_bindings/grok/__init__.py +3 -7
  6. lollms_client/llm_bindings/groq/__init__.py +4 -7
  7. lollms_client/llm_bindings/hugging_face_inference_api/__init__.py +4 -6
  8. lollms_client/llm_bindings/litellm/__init__.py +15 -6
  9. lollms_client/llm_bindings/llamacpp/__init__.py +214 -388
  10. lollms_client/llm_bindings/lollms/__init__.py +24 -14
  11. lollms_client/llm_bindings/lollms_webui/__init__.py +6 -12
  12. lollms_client/llm_bindings/mistral/__init__.py +58 -29
  13. lollms_client/llm_bindings/ollama/__init__.py +6 -11
  14. lollms_client/llm_bindings/open_router/__init__.py +45 -14
  15. lollms_client/llm_bindings/openai/__init__.py +7 -14
  16. lollms_client/llm_bindings/openllm/__init__.py +12 -12
  17. lollms_client/llm_bindings/pythonllamacpp/__init__.py +1 -1
  18. lollms_client/llm_bindings/tensor_rt/__init__.py +8 -13
  19. lollms_client/llm_bindings/transformers/__init__.py +14 -6
  20. lollms_client/llm_bindings/vllm/__init__.py +16 -12
  21. lollms_client/lollms_core.py +296 -487
  22. lollms_client/lollms_discussion.py +436 -78
  23. lollms_client/lollms_llm_binding.py +223 -11
  24. lollms_client/lollms_mcp_binding.py +33 -2
  25. lollms_client/mcp_bindings/local_mcp/__init__.py +3 -2
  26. lollms_client/mcp_bindings/remote_mcp/__init__.py +6 -5
  27. lollms_client/mcp_bindings/standard_mcp/__init__.py +3 -5
  28. lollms_client/stt_bindings/lollms/__init__.py +6 -8
  29. lollms_client/stt_bindings/whisper/__init__.py +2 -4
  30. lollms_client/stt_bindings/whispercpp/__init__.py +15 -16
  31. lollms_client/tti_bindings/dalle/__init__.py +29 -28
  32. lollms_client/tti_bindings/diffusers/__init__.py +25 -21
  33. lollms_client/tti_bindings/gemini/__init__.py +215 -0
  34. lollms_client/tti_bindings/lollms/__init__.py +8 -9
  35. lollms_client-1.0.0.dist-info/METADATA +1214 -0
  36. lollms_client-1.0.0.dist-info/RECORD +69 -0
  37. {lollms_client-0.32.1.dist-info → lollms_client-1.0.0.dist-info}/top_level.txt +0 -2
  38. examples/article_summary/article_summary.py +0 -58
  39. examples/console_discussion/console_app.py +0 -266
  40. examples/console_discussion.py +0 -448
  41. examples/deep_analyze/deep_analyse.py +0 -30
  42. examples/deep_analyze/deep_analyze_multiple_files.py +0 -32
  43. examples/function_calling_with_local_custom_mcp.py +0 -250
  44. examples/generate_a_benchmark_for_safe_store.py +0 -89
  45. examples/generate_and_speak/generate_and_speak.py +0 -251
  46. examples/generate_game_sfx/generate_game_fx.py +0 -240
  47. examples/generate_text_with_multihop_rag_example.py +0 -210
  48. examples/gradio_chat_app.py +0 -228
  49. examples/gradio_lollms_chat.py +0 -259
  50. examples/internet_search_with_rag.py +0 -226
  51. examples/lollms_chat/calculator.py +0 -59
  52. examples/lollms_chat/derivative.py +0 -48
  53. examples/lollms_chat/test_openai_compatible_with_lollms_chat.py +0 -12
  54. examples/lollms_discussions_test.py +0 -155
  55. examples/mcp_examples/external_mcp.py +0 -267
  56. examples/mcp_examples/local_mcp.py +0 -171
  57. examples/mcp_examples/openai_mcp.py +0 -203
  58. examples/mcp_examples/run_remote_mcp_example_v2.py +0 -290
  59. examples/mcp_examples/run_standard_mcp_example.py +0 -204
  60. examples/simple_text_gen_test.py +0 -173
  61. examples/simple_text_gen_with_image_test.py +0 -178
  62. examples/test_local_models/local_chat.py +0 -9
  63. examples/text_2_audio.py +0 -77
  64. examples/text_2_image.py +0 -144
  65. examples/text_2_image_diffusers.py +0 -274
  66. examples/text_and_image_2_audio.py +0 -59
  67. examples/text_gen.py +0 -30
  68. examples/text_gen_system_prompt.py +0 -29
  69. lollms_client-0.32.1.dist-info/METADATA +0 -854
  70. lollms_client-0.32.1.dist-info/RECORD +0 -101
  71. test/test_lollms_discussion.py +0 -368
  72. {lollms_client-0.32.1.dist-info → lollms_client-1.0.0.dist-info}/WHEEL +0 -0
  73. {lollms_client-0.32.1.dist-info → lollms_client-1.0.0.dist-info}/licenses/LICENSE +0 -0
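
For orientation, the two removed Gradio examples below exercise the 0.32.1-era client surface. The following is a minimal sketch of the file-based discussion flow they rely on, drawn only from calls that appear in the deleted code; it assumes a local Ollama server at http://localhost:11434 with mistral:latest pulled, and the save path is illustrative:

    from lollms_client import LollmsClient
    from lollms_client.lollms_discussion import LollmsDiscussion

    # Build a client against the Ollama binding, mirroring the deleted config defaults.
    client = LollmsClient(binding_name="ollama", host_address="http://localhost:11434", model_name="mistral:latest")

    # Create a discussion, register participants, and add the first user message.
    discussion = LollmsDiscussion(client)
    discussion.set_participants({"user": "user", "assistant": "assistant"})
    discussion.add_message(sender="user", content="Hello!")

    # Stream a reply; as the deleted example notes, the callback must return True to keep the stream going.
    reply = ""
    for chunk in client.chat(discussion, stream=True, streaming_callback=lambda c, t: True):
        reply += chunk
    discussion.add_message(sender="assistant", content=reply)
    discussion.save_to_disk("discussions/demo.yaml")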
examples/gradio_chat_app.py
@@ -1,228 +0,0 @@
- # final_working_chat_app.py
-
- import sys
- import os
- import json
- import gradio as gr
- import requests
- from typing import List, Dict, Optional, Tuple
-
- # --- Dependency Installation ---
- try:
-     import pipmaster as pm
-     print("Pipmaster found. Ensuring dependencies are installed...")
-     pm.ensure_packages(["gradio", "requests", "ascii_colors"])
- except ImportError:
-     pass
-
- # --- Import Core Components ---
- try:
-     from lollms_client import LollmsClient
-     from lollms_client.lollms_discussion import LollmsDiscussion
-     from ascii_colors import ASCIIColors
- except ImportError as e:
-     print(f"\nFATAL: A required library is missing.\nPlease ensure lollms-client and ascii_colors are installed.")
-     print(f"Error: {e}"); sys.exit(1)
-
- # --- Standalone Helper Functions for LollmsDiscussion ---
- def export_for_chatbot(discussion: Optional[LollmsDiscussion]) -> List[Dict[str, str]]:
-     if not discussion: return []
-     branch = discussion.get_branch(discussion.active_branch_id)
-     return [{"role": discussion.participants.get(msg.sender, "user"), "content": msg.content} for msg in branch]
-
- def render_discussion_tree(discussion: Optional[LollmsDiscussion]) -> str:
-     if not discussion or not discussion.messages: return "No messages yet."
-     tree_markdown = "### Discussion Tree\n\n"; root_ids = [msg.id for msg in discussion.messages if msg.parent_id is None]
-     def _render_node(node_id: str, depth: int) -> str:
-         node = discussion.message_index.get(node_id)
-         if not node: return ""
-         is_active = " <span class='activ'>[ACTIVE]</span>" if node.id == discussion.active_branch_id else ""
-         line = f"{' ' * depth}- **{node.sender}**: _{node.content.replace(chr(10), ' ').strip()[:60]}..._{is_active}\n"
-         for child_id in discussion.children_index.get(node.id, []): line += _render_node(child_id, depth + 1)
-         return line
-     for root_id in root_ids: tree_markdown += _render_node(root_id, 0)
-     return tree_markdown
-
- def get_message_choices(discussion: Optional[LollmsDiscussion]) -> List[tuple]:
-     if not discussion: return []
-     return [(f"{msg.sender}: {msg.content[:40]}...", msg.id) for msg in discussion.messages]
-
- # --- Configuration & File Management ---
- CONFIG_FILE = "config.json"; DISCUSSIONS_DIR = "discussions"; os.makedirs(DISCUSSIONS_DIR, exist_ok=True)
- DEFAULT_CONFIG = {"binding_name": "ollama", "model_name": "mistral:latest", "host_address": "http://localhost:11434", "openai_api_key": "", "openai_model_name": "gpt-4o"}
- def load_config() -> Dict:
-     if os.path.exists(CONFIG_FILE):
-         try:
-             with open(CONFIG_FILE, 'r') as f: ASCIIColors.info(f"Loaded config from {CONFIG_FILE}"); return json.load(f)
-         except: ASCIIColors.warning(f"Could not load {CONFIG_FILE}, using defaults."); return DEFAULT_CONFIG
-     return DEFAULT_CONFIG
- def save_config(config: Dict):
-     with open(CONFIG_FILE, 'w') as f: json.dump(config, f, indent=2); ASCIIColors.green(f"Saved config to {CONFIG_FILE}")
-
- # --- LollmsClient & Discussion Management ---
- def create_lollms_client(config: Dict) -> Optional[LollmsClient]:
-     try:
-         if config["binding_name"] == "ollama": client = LollmsClient(binding_name="ollama", host_address=config["host_address"], model_name=config["model_name"])
-         elif config["binding_name"] == "openai":
-             if not config.get("openai_api_key"): gr.Warning("OpenAI API key missing."); return None
-             client = LollmsClient(binding_name="openai", model_name=config["openai_model_name"], service_key=config["openai_api_key"])
-         else: gr.Warning(f"Unsupported binding: {config['binding_name']}"); return None
-         ASCIIColors.green("LollmsClient created successfully."); return client
-     except Exception as e: gr.Error(f"Failed to create LollmsClient: {e}"); return None
- def get_discussions_list() -> List[str]: return sorted([f for f in os.listdir(DISCUSSIONS_DIR) if f.endswith(".yaml")])
- def load_discussion(filename: str, client: LollmsClient) -> Optional[LollmsDiscussion]:
-     if not client: ASCIIColors.warning("Cannot load discussion: client is not initialized."); return None
-     try:
-         discussion = LollmsDiscussion(client); discussion.load_from_disk(os.path.join(DISCUSSIONS_DIR, filename))
-         ASCIIColors.info(f"Loaded discussion: {filename}"); return discussion
-     except Exception as e: gr.Error(f"Failed to load discussion {filename}: {e}"); return None
- def list_ollama_models(host: str) -> List[str]:
-     try:
-         r = requests.get(f"{host}/api/tags"); r.raise_for_status(); return [m["name"] for m in r.json().get("models", [])]
-     except: gr.Warning(f"Could not fetch models from {host}."); return []
-
- # --- Gradio UI & Logic ---
- with gr.Blocks(theme=gr.themes.Soft(), css=".activ { font-weight: bold; color: #FF4B4B; }") as demo:
-     client_state = gr.State()
-     discussion_state = gr.State()
-
-     gr.Markdown("# 🌿 Multi-Branch Discussion App")
-     with gr.Row():
-         with gr.Column(scale=1):
-             gr.Markdown("### 📝 Session & Branch Management")
-             discussion_selector = gr.Dropdown(label="Load Discussion", interactive=True)
-             new_discussion_name = gr.Textbox(label="New Discussion Name", placeholder="Enter name and press Enter...")
-             delete_discussion_button = gr.Button("Delete Current Discussion", variant="stop")
-             branch_selector = gr.Dropdown(label="Select Message to Branch From", interactive=True)
-             discussion_tree_display = gr.Markdown("No discussion loaded.")
-         with gr.Column(scale=2):
-             with gr.Accordion("⚙️ Settings & System Prompt", open=False):
-                 system_prompt_input = gr.Textbox(label="System Prompt", lines=3, interactive=True)
-                 with gr.Row():
-                     binding_selector = gr.Radio(["ollama", "openai"], label="AI Binding")
-                     save_settings_button = gr.Button("Save Settings & Re-initialize", variant="primary")
-                 with gr.Group(visible=True) as ollama_settings_group:
-                     ollama_host_input = gr.Textbox(label="Ollama Host Address"); ollama_model_selector = gr.Dropdown(label="Ollama Model", interactive=True); refresh_ollama_button = gr.Button("Refresh Ollama Models")
-                 with gr.Group(visible=False) as openai_settings_group:
-                     openai_api_key_input = gr.Textbox(label="OpenAI API Key", type="password"); openai_model_selector = gr.Dropdown(choices=["gpt-4o", "gpt-4o-mini", "gpt-4-turbo"], label="OpenAI Model", interactive=True)
-             chatbot = gr.Chatbot(label="Conversation", height=600, type="messages")
-             user_input = gr.Textbox(show_label=False, placeholder="Type your message here...", lines=3)
-             send_button = gr.Button("Send", variant="primary")
-
-     # --- Event Handler Functions ---
-     def on_load():
-         config = load_config(); client = create_lollms_client(config)
-         discussions_list = get_discussions_list(); discussion = load_discussion(discussions_list[0], client) if discussions_list else (LollmsDiscussion(client) if client else None)
-         active_discussion_file = discussions_list[0] if discussions_list else None
-
-         history = export_for_chatbot(discussion) if discussion else [{"role": "assistant", "content": "Welcome! Configure client in Settings and create a new chat."}]
-         tree = render_discussion_tree(discussion); branch_choices = get_message_choices(discussion)
-         sys_prompt = discussion.system_prompt if discussion else ""
-         active_branch_id = discussion.active_branch_id if discussion else None
-         is_ollama = config['binding_name'] == 'ollama'; ollama_models = list_ollama_models(config['host_address']) if is_ollama and client else []
-
-         return (client, discussion, gr.update(choices=discussions_list, value=active_discussion_file), config['binding_name'],
-                 gr.update(visible=is_ollama), gr.update(visible=not is_ollama), config['host_address'],
-                 gr.update(choices=ollama_models, value=config.get('model_name')), config['openai_api_key'],
-                 config.get('openai_model_name'), sys_prompt, history, tree, gr.update(choices=branch_choices, value=active_branch_id))
-
-     def handle_save_settings(binding, host, ollama_model, openai_key, openai_model):
-         config = {"binding_name": binding, "host_address": host, "model_name": ollama_model, "openai_api_key": openai_key, "openai_model_name": openai_model}
-         save_config(config); gr.Info("Settings saved! Reloading application..."); return on_load()
-
-     def handle_new_discussion(client, name):
-         if not client: gr.Error("Client not initialized."); return (gr.skip(),) * 5
-         if not name.strip(): gr.Warning("Provide a name."); return (gr.skip(),) * 5
-         filename = f"{name.strip().replace(' ', '_')}.yaml"
-         if os.path.exists(os.path.join(DISCUSSIONS_DIR, filename)): gr.Warning(f"Discussion '{name}' already exists."); return (gr.skip(),) * 5
-         discussion = LollmsDiscussion(client); discussion.set_participants({"user": "user", "assistant": "assistant"})
-         discussion.add_message("assistant", f"This is the beginning of '{name}'."); discussion.save_to_disk(os.path.join(DISCUSSIONS_DIR, filename))
-         return discussion, gr.update(choices=get_discussions_list(), value=filename), export_for_chatbot(discussion), render_discussion_tree(discussion), gr.update(choices=get_message_choices(discussion), value=discussion.active_branch_id)
-
-     def handle_load_discussion(client, filename):
-         if not client: gr.Error("Client not initialized."); return (gr.skip(),) * 5
-         if not filename: return (gr.skip(),) * 5
-         discussion = load_discussion(filename, client)
-         if not discussion: return (gr.skip(),) * 5
-         return discussion, discussion.system_prompt or "", export_for_chatbot(discussion), render_discussion_tree(discussion), gr.update(choices=get_message_choices(discussion), value=discussion.active_branch_id)
-
-     def handle_delete_discussion(filename):
-         if not filename: gr.Warning("No discussion selected to delete."); return (gr.skip(),) * 14
-         try:
-             os.remove(os.path.join(DISCUSSIONS_DIR, filename)); ASCIIColors.red(f"Deleted discussion: {filename}"); gr.Info(f"Deleted {filename}.")
-             return on_load()
-         except Exception as e:
-             gr.Error(f"Failed to delete file: {e}"); return (gr.skip(),) * 14
-
-     def handle_chat_submit(client, discussion, user_text, history, filename):
-         if not client: gr.Error("Client not initialized."); return
-         if not discussion: gr.Error("No discussion loaded."); return
-         if not user_text.strip(): return
-         if not filename: gr.Error("No active discussion file. Cannot save."); return
-
-         parent_id = discussion.active_branch_id
-         discussion.add_message(sender="user", content=user_text, parent_id=parent_id)
-         history.append({"role": "user", "content": user_text}); history.append({"role": "assistant", "content": ""})
-         yield history
-
-         full_response = ""
-         try:
-             # The callback must return True to continue the stream.
-             for chunk in client.chat(discussion, stream=True, streaming_callback=lambda c,t: True):
-                 full_response += chunk; history[-1]["content"] = full_response; yield history
-             discussion.add_message(sender="assistant", content=full_response); discussion.save_to_disk(os.path.join(DISCUSSIONS_DIR, filename))
-         except Exception as e:
-             full_response = f"An error occurred: {e}"; gr.Error(full_response); history[-1]["content"] = full_response
-             discussion.add_message(sender="assistant", content=f"ERROR: {full_response}")
-
-     def on_chat_finish(discussion):
-         # This function updates non-streaming components after the chat is done
-         if not discussion: return gr.skip(), gr.skip()
-         return render_discussion_tree(discussion), gr.update(choices=get_message_choices(discussion), value=discussion.active_branch_id)
-
-     def handle_branch_change(discussion, selected_id):
-         if not discussion or not selected_id: return gr.skip(), gr.skip()
-         discussion.set_active_branch(selected_id)
-         return discussion, export_for_chatbot(discussion)
-
-     # --- Wire up Components ---
-     outputs_on_load = [client_state, discussion_state, discussion_selector, binding_selector, ollama_settings_group, openai_settings_group, ollama_host_input, ollama_model_selector, openai_api_key_input, openai_model_selector, system_prompt_input, chatbot, discussion_tree_display, branch_selector]
-     demo.load(on_load, outputs=outputs_on_load)
-     save_settings_button.click(handle_save_settings, [binding_selector, ollama_host_input, ollama_model_selector, openai_api_key_input, openai_model_selector], outputs_on_load)
-     binding_selector.change(lambda x: (gr.update(visible=x=='ollama'), gr.update(visible=x=='openai')), binding_selector, [ollama_settings_group, openai_settings_group])
-     refresh_ollama_button.click(list_ollama_models, ollama_host_input, ollama_model_selector)
-     system_prompt_input.blur(lambda d,t,f: d.set_system_prompt(t) and d.save_to_disk(os.path.join(DISCUSSIONS_DIR,f)) if d and f else None, [discussion_state, system_prompt_input, discussion_selector], [])
-
-     new_discussion_name.submit(handle_new_discussion, [client_state, new_discussion_name], [discussion_state, discussion_selector, chatbot, discussion_tree_display, branch_selector]).then(lambda: "", outputs=[new_discussion_name])
-     discussion_selector.change(handle_load_discussion, [client_state, discussion_selector], [discussion_state, system_prompt_input, chatbot, discussion_tree_display, branch_selector])
-     delete_discussion_button.click(handle_delete_discussion, [discussion_selector], outputs_on_load)
-
-     # --- CORRECTED WIRING FOR CHAT ---
-     chat_stream_event = user_input.submit(
-         fn=handle_chat_submit,
-         inputs=[client_state, discussion_state, user_input, chatbot, discussion_selector],
-         outputs=[chatbot],
-     )
-     # After the stream from handle_chat_submit is done, its input (discussion_state) will be updated.
-     # We can then pass that state to on_chat_finish.
-     chat_stream_event.then(
-         fn=on_chat_finish,
-         inputs=[discussion_state], # The input is the state object that was modified by the previous function
-         outputs=[discussion_tree_display, branch_selector]
-     ).then(lambda: "", outputs=[user_input])
-
-     send_button_stream_event = send_button.click(
-         fn=handle_chat_submit,
-         inputs=[client_state, discussion_state, user_input, chatbot, discussion_selector],
-         outputs=[chatbot]
-     )
-     send_button_stream_event.then(
-         fn=on_chat_finish,
-         inputs=[discussion_state],
-         outputs=[discussion_tree_display, branch_selector]
-     ).then(lambda: "", outputs=[user_input])
-
-     branch_selector.change(handle_branch_change, [discussion_state, branch_selector], [discussion_state, chatbot])
-
- if __name__ == "__main__":
-     demo.launch()
examples/gradio_lollms_chat.py
@@ -1,259 +0,0 @@
- # research_app_final.py
-
- import os
- import json
- import shutil
- import gradio as gr
- from pathlib import Path
- # Use the correct, specified import style
- from lollms_client import LollmsClient, LollmsDiscussion, MSG_TYPE, LollmsDataManager
- from ascii_colors import ASCIIColors
- from sqlalchemy import Column, String
-
- # --- 1. Define Application-Specific Schema ---
- # This allows applications to store and query their own metadata in the database.
- class ResearchDiscussionMixin:
-     project_name = Column(String(100), index=True, nullable=False)
-
- class ResearchMessageMixin:
-     pass
-
- # --- 2. Global Setup: Client and Database ---
- # These are initialized once and used throughout the app's lifecycle.
- try:
-     lc = LollmsClient("ollama", model_name="mistral-nemo:latest")
-     db_manager = LollmsDataManager(
-         db_path="sqlite:///research_projects_gradio.db",
-         discussion_mixin=ResearchDiscussionMixin,
-         message_mixin=ResearchMessageMixin,
-         encryption_key="a-super-secret-key-for-the-gradio-app"
-     )
-     print("✅ Client and Database initialized successfully.")
- except Exception as e:
-     print(f"❌ FATAL: Could not initialize services. Is Ollama running? Error: {e}")
-     lc = None
-     db_manager = None
-
- # --- 3. UI Helper Functions ---
- # These functions connect the Gradio UI to our discussion library's backend logic.
-
- def _get_discussion_list():
-     """Helper to fetch and format the list of discussions for the dropdown."""
-     if not db_manager: return []
-     discussions = db_manager.list_discussions()
-     return [(d.get('project_name', d['id']), d['id']) for d in discussions]
-
- def _format_chatbot_history(discussion: LollmsDiscussion):
-     """Converts a discussion's active branch into Gradio's chatbot format."""
-     history = []
-     if not discussion: return history
-
-     branch = discussion.get_branch(discussion.active_branch_id)
-     # This robust loop correctly pairs user and AI messages.
-     i = 0
-     while i < len(branch):
-         if branch[i]['sender_type'] == 'user':
-             user_msg = branch[i]['content']
-             if i + 1 < len(branch) and branch[i+1]['sender_type'] == 'assistant':
-                 ai_msg = branch[i+1]['content']
-                 history.append((user_msg, ai_msg))
-                 i += 2
-             else:
-                 history.append((user_msg, None))
-                 i += 1
-         else:
-             ai_msg = branch[i]['content']
-             history.append((None, ai_msg))
-             i += 1
-     return history
-
- # --- 4. Gradio UI Event Handler Functions ---
-
- def handle_new_discussion(name: str):
-     """Called when the 'New Project' button is clicked."""
-     if not name.strip():
-         gr.Warning("Project name cannot be empty.")
-         return gr.Dropdown(choices=_get_discussion_list()), None, []
-
-     discussion = LollmsDiscussion.create_new(
-         lollms_client=lc,
-         db_manager=db_manager,
-         project_name=name.strip(),
-         autosave=True
-     )
-     discussion.set_system_prompt(f"This is a research project about {name.strip()}. Be helpful and concise, but use <think> tags to outline your process before answering.")
-     discussion.set_participants({"user":"user", "assistant":"assistant"})
-
-     gr.Info(f"Project '{name.strip()}' created!")
-
-     return gr.Dropdown(choices=_get_discussion_list(), value=discussion.id), discussion.id, []
-
- def handle_load_discussion(discussion_id: str):
-     """Called when a discussion is selected from the dropdown."""
-     if not discussion_id:
-         return None, []
-
-     discussion = db_manager.get_discussion(lollms_client=lc, discussion_id=discussion_id)
-     chatbot_history = _format_chatbot_history(discussion)
-
-     return chatbot_history
-
- def handle_delete_discussion(discussion_id: str):
-     """Called when the 'Delete' button is clicked."""
-     if not discussion_id:
-         gr.Warning("No project selected to delete.")
-         return gr.Dropdown(choices=_get_discussion_list()), None, []
-
-     db_manager.delete_discussion(discussion_id)
-     gr.Info("Project deleted.")
-
-     return gr.Dropdown(choices=_get_discussion_list(), value=None), None, []
-
- def handle_chat_submit(user_input: str, chatbot_history: list, discussion_id: str, show_thoughts: bool):
-     """The main chat handler, called on message submit. Uses a generator for streaming."""
-     if not discussion_id:
-         gr.Warning("Please select or create a project first.")
-         return "", chatbot_history
-     if not user_input.strip():
-         return "", chatbot_history
-
-     discussion = db_manager.get_discussion(lollms_client=lc, discussion_id=discussion_id, autosave=True)
-
-     chatbot_history.append((user_input, None))
-     yield "", chatbot_history
-
-     ai_message_buffer = ""
-
-     def stream_to_chatbot(token: str, msg_type: MSG_TYPE):
-         nonlocal ai_message_buffer
-         if msg_type == MSG_TYPE.MSG_TYPE_CHUNK:
-             ai_message_buffer += token
-             chatbot_history[-1] = (user_input, ai_message_buffer)
-         elif msg_type == MSG_TYPE.MSG_TYPE_THOUGHT_CHUNK:
-             thought_html = f"<p style='color:magenta;'><i>{token}</i></p>"
-             chatbot_history[-1] = (user_input, ai_message_buffer + thought_html)
-         return True
-
-     discussion.chat(
-         user_message=user_input,
-         show_thoughts=show_thoughts,
-         streaming_callback=stream_to_chatbot
-     )
-
-     yield "", chatbot_history
-
- def handle_regenerate(chatbot_history: list, discussion_id: str, show_thoughts: bool):
-     """Called to regenerate the last AI response."""
-     if not discussion_id:
-         gr.Warning("Please select a project first.")
-         return chatbot_history
-     if not chatbot_history or chatbot_history[-1][1] is None:
-         gr.Warning("Nothing to regenerate.")
-         return chatbot_history
-
-     discussion = db_manager.get_discussion(lollms_client=lc, discussion_id=discussion_id, autosave=True)
-
-     chatbot_history.pop()
-     user_input_for_ui = chatbot_history[-1][0] if chatbot_history else ""
-     chatbot_history.append((user_input_for_ui, None))
-     yield chatbot_history
-
-     ai_message_buffer = ""
-
-     def stream_to_chatbot(token: str, msg_type: MSG_TYPE):
-         nonlocal ai_message_buffer
-         if msg_type == MSG_TYPE.MSG_TYPE_CHUNK:
-             ai_message_buffer += token
-             chatbot_history[-1] = (user_input_for_ui, ai_message_buffer)
-         elif msg_type == MSG_TYPE.MSG_TYPE_THOUGHT_CHUNK:
-             thought_html = f"<p style='color:magenta;'><i>{token}</i></p>"
-             chatbot_history[-1] = (user_input_for_ui, ai_message_buffer + thought_html)
-         return True
-
-     discussion.regenerate_branch(
-         show_thoughts=show_thoughts,
-         streaming_callback=stream_to_chatbot
-     )
-
-     yield chatbot_history
-
- # --- 5. Build and Launch the Gradio App ---
- with gr.Blocks(theme=gr.themes.Soft(), title="Lollms Discussion Manager") as demo:
-     discussion_id_state = gr.State(None)
-
-     gr.Markdown("# Lollms Discussion Manager")
-
-     with gr.Row():
-         with gr.Column(scale=1, min_width=300):
-             gr.Markdown("## Projects")
-             discussion_dd = gr.Dropdown(
-                 choices=_get_discussion_list(),
-                 label="Select Project",
-                 interactive=True
-             )
-             with gr.Accordion("Manage Projects", open=False):
-                 new_discussion_name = gr.Textbox(label="New Project Name", placeholder="Enter name and press button")
-                 with gr.Row():
-                     new_discussion_btn = gr.Button("➕ New")
-                     delete_discussion_btn = gr.Button("❌ Delete")
-
-             gr.Markdown("---")
-             gr.Markdown("## Options")
-             show_thoughts_check = gr.Checkbox(label="Show AI Thoughts", value=False)
-
-         with gr.Column(scale=3):
-             chatbot = gr.Chatbot(label="Conversation", height=600, bubble_full_width=False, render_markdown=True)
-             with gr.Row():
-                 user_input_tb = gr.Textbox(
-                     label="Your Message",
-                     placeholder="Type your message here...",
-                     scale=5,
-                     autofocus=True
-                 )
-                 send_btn = gr.Button("✉️ Send", variant="primary", scale=1)
-                 regenerate_btn = gr.Button("🔄 Regenerate", scale=1)
-
-     # --- Event Handling: Wiring the UI to the backend functions ---
-
-     new_discussion_btn.click(
-         fn=handle_new_discussion,
-         inputs=[new_discussion_name],
-         outputs=[discussion_dd, discussion_id_state, chatbot]
-     ).then(fn=lambda: "", inputs=None, outputs=[new_discussion_name])
-
-     delete_discussion_btn.click(
-         fn=handle_delete_discussion,
-         inputs=[discussion_id_state],
-         outputs=[discussion_dd, discussion_id_state, chatbot]
-     )
-
-     discussion_dd.change(
-         fn=handle_load_discussion,
-         inputs=[discussion_dd],
-         outputs=[chatbot]
-     ).then(lambda x: x, inputs=[discussion_dd], outputs=[discussion_id_state])
-
-     user_input_tb.submit(
-         fn=handle_chat_submit,
-         inputs=[user_input_tb, chatbot, discussion_id_state, show_thoughts_check],
-         outputs=[user_input_tb, chatbot]
-     )
-     send_btn.click(
-         fn=handle_chat_submit,
-         inputs=[user_input_tb, chatbot, discussion_id_state, show_thoughts_check],
-         outputs=[user_input_tb, chatbot]
-     )
-     regenerate_btn.click(
-         fn=handle_regenerate,
-         inputs=[chatbot, discussion_id_state, show_thoughts_check],
-         outputs=[chatbot]
-     )
-
- if __name__ == "__main__":
-     if lc is None or db_manager is None:
-         print("Could not start Gradio app due to initialization failure.")
-     else:
-         demo.launch()
-         print("\n--- App closed. Cleaning up. ---")
-         if os.path.exists("research_projects_gradio.db"):
-             os.remove("research_projects_gradio.db")