lollms-client 0.20.10__py3-none-any.whl → 0.22.0__py3-none-any.whl
This diff shows the content of publicly released versions of this package, as published to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registry.
Potentially problematic release.
This version of lollms-client might be problematic.
- examples/console_discussion.py +448 -0
- examples/gradio_lollms_chat.py +259 -0
- examples/lollms_discussions_test.py +155 -0
- lollms_client/__init__.py +5 -2
- lollms_client/llm_bindings/ollama/__init__.py +1 -1
- lollms_client/lollms_core.py +86 -2
- lollms_client/lollms_discussion.py +638 -386
- lollms_client/lollms_personality.py +182 -0
- lollms_client/lollms_types.py +19 -16
- lollms_client/lollms_utilities.py +71 -57
- lollms_client/mcp_bindings/remote_mcp/__init__.py +2 -1
- {lollms_client-0.20.10.dist-info → lollms_client-0.22.0.dist-info}/METADATA +1 -1
- {lollms_client-0.20.10.dist-info → lollms_client-0.22.0.dist-info}/RECORD +17 -15
- {lollms_client-0.20.10.dist-info → lollms_client-0.22.0.dist-info}/top_level.txt +1 -0
- personalities/parrot.py +10 -0
- examples/personality_test/chat_test.py +0 -37
- examples/personality_test/chat_with_aristotle.py +0 -42
- examples/personality_test/tesks_test.py +0 -62
- {lollms_client-0.20.10.dist-info → lollms_client-0.22.0.dist-info}/WHEEL +0 -0
- {lollms_client-0.20.10.dist-info → lollms_client-0.22.0.dist-info}/licenses/LICENSE +0 -0
examples/gradio_lollms_chat.py
ADDED
@@ -0,0 +1,259 @@
+# research_app_final.py
+
+import os
+import json
+import shutil
+import gradio as gr
+from pathlib import Path
+# Use the correct, specified import style
+from lollms_client import LollmsClient, LollmsDiscussion, MSG_TYPE, LollmsDataManager
+from ascii_colors import ASCIIColors
+from sqlalchemy import Column, String
+
+# --- 1. Define Application-Specific Schema ---
+# This allows applications to store and query their own metadata in the database.
+class ResearchDiscussionMixin:
+    project_name = Column(String(100), index=True, nullable=False)
+
+class ResearchMessageMixin:
+    pass
+
+# --- 2. Global Setup: Client and Database ---
+# These are initialized once and used throughout the app's lifecycle.
+try:
+    lc = LollmsClient("ollama", model_name="mistral-nemo:latest")
+    db_manager = LollmsDataManager(
+        db_path="sqlite:///research_projects_gradio.db",
+        discussion_mixin=ResearchDiscussionMixin,
+        message_mixin=ResearchMessageMixin,
+        encryption_key="a-super-secret-key-for-the-gradio-app"
+    )
+    print("✅ Client and Database initialized successfully.")
+except Exception as e:
+    print(f"❌ FATAL: Could not initialize services. Is Ollama running? Error: {e}")
+    lc = None
+    db_manager = None
+
+# --- 3. UI Helper Functions ---
+# These functions connect the Gradio UI to our discussion library's backend logic.
+
+def _get_discussion_list():
+    """Helper to fetch and format the list of discussions for the dropdown."""
+    if not db_manager: return []
+    discussions = db_manager.list_discussions()
+    return [(d.get('project_name', d['id']), d['id']) for d in discussions]
+
+def _format_chatbot_history(discussion: LollmsDiscussion):
+    """Converts a discussion's active branch into Gradio's chatbot format."""
+    history = []
+    if not discussion: return history
+
+    branch = discussion.get_branch(discussion.active_branch_id)
+    # This robust loop correctly pairs user and AI messages.
+    i = 0
+    while i < len(branch):
+        if branch[i]['sender_type'] == 'user':
+            user_msg = branch[i]['content']
+            if i + 1 < len(branch) and branch[i+1]['sender_type'] == 'assistant':
+                ai_msg = branch[i+1]['content']
+                history.append((user_msg, ai_msg))
+                i += 2
+            else:
+                history.append((user_msg, None))
+                i += 1
+        else:
+            ai_msg = branch[i]['content']
+            history.append((None, ai_msg))
+            i += 1
+    return history
+
+# --- 4. Gradio UI Event Handler Functions ---
+
+def handle_new_discussion(name: str):
+    """Called when the 'New Project' button is clicked."""
+    if not name.strip():
+        gr.Warning("Project name cannot be empty.")
+        return gr.Dropdown(choices=_get_discussion_list()), None, []
+
+    discussion = LollmsDiscussion.create_new(
+        lollms_client=lc,
+        db_manager=db_manager,
+        project_name=name.strip(),
+        autosave=True
+    )
+    discussion.set_system_prompt(f"This is a research project about {name.strip()}. Be helpful and concise, but use <think> tags to outline your process before answering.")
+    discussion.set_participants({"user":"user", "assistant":"assistant"})
+
+    gr.Info(f"Project '{name.strip()}' created!")
+
+    return gr.Dropdown(choices=_get_discussion_list(), value=discussion.id), discussion.id, []
+
+def handle_load_discussion(discussion_id: str):
+    """Called when a discussion is selected from the dropdown."""
+    if not discussion_id:
+        return None, []
+
+    discussion = db_manager.get_discussion(lollms_client=lc, discussion_id=discussion_id)
+    chatbot_history = _format_chatbot_history(discussion)
+
+    return chatbot_history
+
+def handle_delete_discussion(discussion_id: str):
+    """Called when the 'Delete' button is clicked."""
+    if not discussion_id:
+        gr.Warning("No project selected to delete.")
+        return gr.Dropdown(choices=_get_discussion_list()), None, []
+
+    db_manager.delete_discussion(discussion_id)
+    gr.Info("Project deleted.")
+
+    return gr.Dropdown(choices=_get_discussion_list(), value=None), None, []
+
+def handle_chat_submit(user_input: str, chatbot_history: list, discussion_id: str, show_thoughts: bool):
+    """The main chat handler, called on message submit. Uses a generator for streaming."""
+    if not discussion_id:
+        gr.Warning("Please select or create a project first.")
+        return "", chatbot_history
+    if not user_input.strip():
+        return "", chatbot_history
+
+    discussion = db_manager.get_discussion(lollms_client=lc, discussion_id=discussion_id, autosave=True)
+
+    chatbot_history.append((user_input, None))
+    yield "", chatbot_history
+
+    ai_message_buffer = ""
+
+    def stream_to_chatbot(token: str, msg_type: MSG_TYPE):
+        nonlocal ai_message_buffer
+        if msg_type == MSG_TYPE.MSG_TYPE_CHUNK:
+            ai_message_buffer += token
+            chatbot_history[-1] = (user_input, ai_message_buffer)
+        elif msg_type == MSG_TYPE.MSG_TYPE_THOUGHT_CHUNK:
+            thought_html = f"<p style='color:magenta;'><i>{token}</i></p>"
+            chatbot_history[-1] = (user_input, ai_message_buffer + thought_html)
+        return True
+
+    discussion.chat(
+        user_message=user_input,
+        show_thoughts=show_thoughts,
+        streaming_callback=stream_to_chatbot
+    )
+
+    yield "", chatbot_history
+
+def handle_regenerate(chatbot_history: list, discussion_id: str, show_thoughts: bool):
+    """Called to regenerate the last AI response."""
+    if not discussion_id:
+        gr.Warning("Please select a project first.")
+        return chatbot_history
+    if not chatbot_history or chatbot_history[-1][1] is None:
+        gr.Warning("Nothing to regenerate.")
+        return chatbot_history
+
+    discussion = db_manager.get_discussion(lollms_client=lc, discussion_id=discussion_id, autosave=True)
+
+    chatbot_history.pop()
+    user_input_for_ui = chatbot_history[-1][0] if chatbot_history else ""
+    chatbot_history.append((user_input_for_ui, None))
+    yield chatbot_history
+
+    ai_message_buffer = ""
+
+    def stream_to_chatbot(token: str, msg_type: MSG_TYPE):
+        nonlocal ai_message_buffer
+        if msg_type == MSG_TYPE.MSG_TYPE_CHUNK:
+            ai_message_buffer += token
+            chatbot_history[-1] = (user_input_for_ui, ai_message_buffer)
+        elif msg_type == MSG_TYPE.MSG_TYPE_THOUGHT_CHUNK:
+            thought_html = f"<p style='color:magenta;'><i>{token}</i></p>"
+            chatbot_history[-1] = (user_input_for_ui, ai_message_buffer + thought_html)
+        return True
+
+    discussion.regenerate_branch(
+        show_thoughts=show_thoughts,
+        streaming_callback=stream_to_chatbot
+    )
+
+    yield chatbot_history
+
+# --- 5. Build and Launch the Gradio App ---
+with gr.Blocks(theme=gr.themes.Soft(), title="Lollms Discussion Manager") as demo:
+    discussion_id_state = gr.State(None)
+
+    gr.Markdown("# Lollms Discussion Manager")
+
+    with gr.Row():
+        with gr.Column(scale=1, min_width=300):
+            gr.Markdown("## Projects")
+            discussion_dd = gr.Dropdown(
+                choices=_get_discussion_list(),
+                label="Select Project",
+                interactive=True
+            )
+            with gr.Accordion("Manage Projects", open=False):
+                new_discussion_name = gr.Textbox(label="New Project Name", placeholder="Enter name and press button")
+                with gr.Row():
+                    new_discussion_btn = gr.Button("➕ New")
+                    delete_discussion_btn = gr.Button("❌ Delete")
+
+            gr.Markdown("---")
+            gr.Markdown("## Options")
+            show_thoughts_check = gr.Checkbox(label="Show AI Thoughts", value=False)
+
+        with gr.Column(scale=3):
+            chatbot = gr.Chatbot(label="Conversation", height=600, bubble_full_width=False, render_markdown=True)
+            with gr.Row():
+                user_input_tb = gr.Textbox(
+                    label="Your Message",
+                    placeholder="Type your message here...",
+                    scale=5,
+                    autofocus=True
+                )
+                send_btn = gr.Button("✉️ Send", variant="primary", scale=1)
+                regenerate_btn = gr.Button("🔄 Regenerate", scale=1)
+
+    # --- Event Handling: Wiring the UI to the backend functions ---
+
+    new_discussion_btn.click(
+        fn=handle_new_discussion,
+        inputs=[new_discussion_name],
+        outputs=[discussion_dd, discussion_id_state, chatbot]
+    ).then(fn=lambda: "", inputs=None, outputs=[new_discussion_name])
+
+    delete_discussion_btn.click(
+        fn=handle_delete_discussion,
+        inputs=[discussion_id_state],
+        outputs=[discussion_dd, discussion_id_state, chatbot]
+    )
+
+    discussion_dd.change(
+        fn=handle_load_discussion,
+        inputs=[discussion_dd],
+        outputs=[chatbot]
+    ).then(lambda x: x, inputs=[discussion_dd], outputs=[discussion_id_state])
+
+    user_input_tb.submit(
+        fn=handle_chat_submit,
+        inputs=[user_input_tb, chatbot, discussion_id_state, show_thoughts_check],
+        outputs=[user_input_tb, chatbot]
+    )
+    send_btn.click(
+        fn=handle_chat_submit,
+        inputs=[user_input_tb, chatbot, discussion_id_state, show_thoughts_check],
+        outputs=[user_input_tb, chatbot]
+    )
+    regenerate_btn.click(
+        fn=handle_regenerate,
+        inputs=[chatbot, discussion_id_state, show_thoughts_check],
+        outputs=[chatbot]
+    )
+
+if __name__ == "__main__":
+    if lc is None or db_manager is None:
+        print("Could not start Gradio app due to initialization failure.")
+    else:
+        demo.launch()
+        print("\n--- App closed. Cleaning up. ---")
+        if os.path.exists("research_projects_gradio.db"):
+            os.remove("research_projects_gradio.db")
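
A note on the streaming contract used throughout this example: `discussion.chat()` pushes tokens to a callback that receives the text chunk plus a `MSG_TYPE` and returns a boolean. A minimal console-side sketch under the same assumptions as the example (a local Ollama server with the model already pulled):

    from lollms_client import LollmsClient, LollmsDiscussion, MSG_TYPE

    lc = LollmsClient("ollama", model_name="mistral-nemo:latest")
    # No db_manager is passed, so the discussion lives purely in memory.
    discussion = LollmsDiscussion.create_new(lollms_client=lc)

    def on_token(token: str, msg_type: MSG_TYPE):
        if msg_type == MSG_TYPE.MSG_TYPE_CHUNK:
            print(token, end="", flush=True)
        return True  # the examples always return True to keep streaming

    discussion.chat("Hello!", streaming_callback=on_token)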
examples/lollms_discussions_test.py
ADDED
@@ -0,0 +1,155 @@
+# research_app_final.py
+
+import os
+import json
+import shutil
+from pathlib import Path
+# Use the correct, specified import style
+from lollms_client import LollmsClient
+from lollms_client.lollms_discussion import LollmsDataManager, LollmsDiscussion
+from lollms_client.lollms_types import MSG_TYPE
+from sqlalchemy import Column, String
+
+# --- 1. Define Application-Specific Schema ---
+# The developer can define their own fields for the database tables.
+# This allows applications to store and query their own metadata.
+class ResearchDiscussionMixin:
+    # We want each discussion to have a 'project_name' that we can search for.
+    project_name = Column(String(100), index=True, nullable=False)
+
+class ResearchMessageMixin:
+    # This mixin is empty for this example.
+    pass
+
+def setup_migration_dummies(folder: Path):
+    """Creates a dummy JSON file to simulate an old, file-based project structure."""
+    if not folder.exists():
+        folder.mkdir(parents=True, exist_ok=True)
+
+    # This data structure mimics what the old `to_dict` would have produced.
+    discussion_data = {
+        "id": "old_project_alpha",
+        "metadata": {"project_name": "Project Alpha"},
+        "system_prompt": "This is the system prompt for Alpha.",
+        "created_at": "2023-01-01T12:00:00",
+        "updated_at": "2023-01-01T12:05:00",
+        "messages": [
+            {"id": "msg1", "sender": "user", "sender_type":"user", "content": "What was the first finding?", "created_at": "2023-01-01T12:00:00"},
+            {"id": "msg2", "sender": "assistant", "sender_type":"assistant", "content": "It was about quantum states.", "parent_id": "msg1", "created_at": "2023-01-01T12:05:00"}
+        ]
+    }
+    with open(folder / "project_alpha.json", "w") as f:
+        json.dump(discussion_data, f, indent=2)
+    print(f"Created dummy migration file in '{folder}'.")
+
+def main():
+    # --- 2. Setup: Lollms Client is always needed ---
+    print("--- LOLLMS Research Assistant (Final Version) ---")
+    try:
+        # Instantiate the real LollmsClient to connect to a running model service.
+        # Ensure Ollama is running and has pulled the specified model.
+        lc = LollmsClient("ollama", model_name="mistral-nemo:latest")
+        print("LollmsClient connected successfully to Ollama.")
+    except Exception as e:
+        print(f"\nFATAL: Could not connect to LLM binding. Is Ollama running?\nError: {e}")
+        return
+
+    # --- DEMO 1: In-Memory Mode (Backward Compatibility) ---
+    print("\n--- DEMO 1: In-Memory Discussion ---")
+
+    # Create an in-memory discussion by NOT passing a db_manager.
+    in_memory_discussion = LollmsDiscussion.create_new(lollms_client=lc)
+    in_memory_discussion.system_prompt = "You are a helpful assistant."
+    print("Created an in-memory discussion.")
+
+    # Interact with it. The state is held entirely in the object.
+    user_input_mem = "Can you remember that my favorite color is blue?"
+    print(f"You > {user_input_mem}")
+    print("AI > ", end="", flush=True)
+    def stream_to_console(token, msg_type=MSG_TYPE.MSG_TYPE_CHUNK):
+        print(token, end="", flush=True)
+        return True
+    in_memory_discussion.chat(user_input_mem, streaming_callback=stream_to_console)
+    print()
+
+    # Save its state to a JSON file. This now works correctly.
+    file_path = Path("./in_memory_save.json")
+    with open(file_path, "w") as f:
+        json.dump(in_memory_discussion.to_dict(), f, indent=2)
+    print(f"\nIn-memory discussion saved to '{file_path}'.")
+    os.remove(file_path)
+
+    # --- DEMO 2: Database-Backed Mode with Migration ---
+    print("\n--- DEMO 2: Database-Backed Mode ---")
+    DB_PATH = "sqlite:///research_projects_final.db"
+    ENCRYPTION_KEY = "a-secure-password-for-the-database"
+    MIGRATION_FOLDER = Path("./old_discussions")
+
+    try:
+        # Initialize the LollmsDataManager with our schema and encryption key.
+        db_manager = LollmsDataManager(
+            db_path=DB_PATH,
+            discussion_mixin=ResearchDiscussionMixin,
+            message_mixin=ResearchMessageMixin,
+            encryption_key=ENCRYPTION_KEY
+        )
+        print(f"Database setup complete. Encryption is ENABLED.")
+    except Exception as e:
+        print(f"\nFATAL: Could not initialize database. Error: {e}")
+        return
+
+    # Demonstrate the one-time migration from a folder of JSON files.
+    setup_migration_dummies(MIGRATION_FOLDER)
+    input("\nDummy migration files created. Press Enter to run the migration...")
+    LollmsDiscussion.migrate(lollms_client=lc, db_manager=db_manager, folder_path=MIGRATION_FOLDER)
+
+    session = db_manager.get_session()
+    migrated_count = session.query(db_manager.DiscussionModel).count()
+    print(f"Verification: Found {migrated_count} discussions in the database after migration.")
+    session.close()
+
+    # --- DEMO 3: Live Chat with a DB-Backed Discussion ---
+    input("\nMigration complete. Press Enter to start a new, database-backed chat session...")
+
+    # Create a new, database-backed discussion with our custom 'project_name'.
+    discussion = LollmsDiscussion.create_new(
+        lollms_client=lc,
+        db_manager=db_manager,
+        max_context_size=lc.default_ctx_size // 2,
+        autosave=True,
+        project_name="Project Gamma (Live)"
+    )
+    discussion.system_prompt = "You are a helpful assistant for Project Gamma."
+
+    print(f"\n--- Live Chat for '{discussion.db_discussion.project_name}' ---")
+    print("Type your message, or '/exit', '/export_openai', '/export_ollama' to quit.")
+
+    while True:
+        user_input = input("\nYou > ")
+        if user_input.lower() == '/exit': break
+
+        if user_input.lower().startswith('/export'):
+            try:
+                format_type = user_input.split('_')[1] + "_chat"
+                exported_data = discussion.export(format_type)
+                print(f"\n--- Exported for {format_type.split('_')[0].upper()} ---")
+                print(json.dumps(exported_data, indent=2))
+                print("-----------------------------------")
+            except IndexError:
+                print("Invalid export command. Use /export_openai or /export_ollama")
+            continue
+
+        print("AI > ", end="", flush=True)
+        # The same streaming callback works seamlessly.
+        discussion.chat(user_input, streaming_callback=stream_to_console)
+        print()
+
+    # --- Cleanup ---
+    print("\n--- Demo complete. Cleaning up. ---")
+    if os.path.exists(DB_PATH):
+        os.remove(DB_PATH)
+    if MIGRATION_FOLDER.exists():
+        shutil.rmtree(MIGRATION_FOLDER)
+
+if __name__ == "__main__":
+    main()
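
For reference, the `/export_openai` and `/export_ollama` commands above resolve to the format strings "openai_chat" and "ollama_chat" before calling `discussion.export()`. A hedged sketch of the direct call, reusing the `discussion` object from the demo:

    import json

    # Format strings taken from the demo's command parsing above.
    exported = discussion.export("openai_chat")
    print(json.dumps(exported, indent=2))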
lollms_client/__init__.py
CHANGED
@@ -1,13 +1,14 @@
 # lollms_client/__init__.py
 from lollms_client.lollms_core import LollmsClient, ELF_COMPLETION_FORMAT
 from lollms_client.lollms_types import MSG_TYPE # Assuming ELF_GENERATION_FORMAT is not directly used by users from here
-from lollms_client.lollms_discussion import LollmsDiscussion, LollmsMessage
+from lollms_client.lollms_discussion import LollmsDiscussion, LollmsDataManager, LollmsMessage
+from lollms_client.lollms_personality import LollmsPersonality
 from lollms_client.lollms_utilities import PromptReshaper # Keep general utilities
 # Import new MCP binding classes
 from lollms_client.lollms_mcp_binding import LollmsMCPBinding, LollmsMCPBindingManager


-__version__ = "0.20.10"
+__version__ = "0.22.0" # Updated version

@@ -16,6 +17,8 @@ __all__ = [
     "MSG_TYPE",
     "LollmsDiscussion",
     "LollmsMessage",
+    "LollmsPersonality",
+    "LollmsDataManager",
     "PromptReshaper",
     "LollmsMCPBinding", # Export LollmsMCPBinding ABC
     "LollmsMCPBindingManager", # Export LollmsMCPBindingManager
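
For orientation, after this change the data manager and personality classes are importable from the package root. A one-line sketch of the new surface:

    from lollms_client import (
        LollmsClient,
        LollmsDiscussion,
        LollmsDataManager,   # new top-level export in 0.22.0
        LollmsPersonality,   # new top-level export in 0.22.0
        MSG_TYPE,
    )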
lollms_client/llm_bindings/ollama/__init__.py
CHANGED
@@ -449,7 +449,7 @@ class OllamaBinding(LollmsLLMBinding):
        """
        return {
            "name": self.binding_name, # from super class
-           "version": ollama
+           "version": pm.get_installed_version("ollama") if ollama else "unknown", # Ollama library version
            "host_address": self.host_address,
            "model_name": self.model_name,
            "supports_structured_output": False, # Ollama primarily supports text/chat
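
The replacement line queries the installed library version through what is presumably pipmaster, which the lollms bindings import as `pm`. A minimal sketch of the same lookup, assuming pipmaster is installed:

    import pipmaster as pm

    # Version string of the installed "ollama" Python package, as used
    # by the binding's info dict above.
    print(pm.get_installed_version("ollama"))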
lollms_client/lollms_core.py
CHANGED
@@ -76,7 +76,9 @@ class LollmsClient():
                 n_threads: int = 8,
                 streaming_callback: Optional[Callable[[str, MSG_TYPE], None]] = None,
                 user_name ="user",
-                ai_name = "assistant"
+                ai_name = "assistant",
+                **kwargs
+                ):
        """
        Initialize the LollmsClient with LLM and optional modality bindings.

@@ -260,6 +262,81 @@
        self.end_ai_header_id_template =": "
        self.end_ai_message_id_template =""

+    #
+    def update_llm_binding(self, binding_name: str, config: Optional[Dict[str, Any]] = None):
+        """Update the LLM binding with a new configuration."""
+        self.binding = self.binding_manager.create_binding(
+            binding_name=binding_name,
+            host_address=self.host_address,
+            models_path=self.models_path,
+            model_name=self.binding.model_name, # Keep the same model name
+            service_key=self.service_key,
+            verify_ssl_certificate=self.verify_ssl_certificate,
+            **(config or {})
+        )
+        if self.binding is None:
+            available = self.binding_manager.get_available_bindings()
+            raise ValueError(f"Failed to update LLM binding: {binding_name}. Available: {available}")
+
+    def update_tts_binding(self, binding_name: str, config: Optional[Dict[str, Any]] = None):
+        """Update the TTS binding with a new configuration."""
+        self.tts = self.tts_binding_manager.create_binding(
+            binding_name=binding_name,
+            **(config or {})
+        )
+        if self.tts is None:
+            available = self.tts_binding_manager.get_available_bindings()
+            raise ValueError(f"Failed to update TTS binding: {binding_name}. Available: {available}")
+
+    def update_tti_binding(self, binding_name: str, config: Optional[Dict[str, Any]] = None):
+        """Update the TTI binding with a new configuration."""
+        self.tti = self.tti_binding_manager.create_binding(
+            binding_name=binding_name,
+            **(config or {})
+        )
+        if self.tti is None:
+            available = self.tti_binding_manager.get_available_bindings()
+            raise ValueError(f"Failed to update TTI binding: {binding_name}. Available: {available}")
+
+    def update_stt_binding(self, binding_name: str, config: Optional[Dict[str, Any]] = None):
+        """Update the STT binding with a new configuration."""
+        self.stt = self.stt_binding_manager.create_binding(
+            binding_name=binding_name,
+            **(config or {})
+        )
+        if self.stt is None:
+            available = self.stt_binding_manager.get_available_bindings()
+            raise ValueError(f"Failed to update STT binding: {binding_name}. Available: {available}")
+
+    def update_ttv_binding(self, binding_name: str, config: Optional[Dict[str, Any]] = None):
+        """Update the TTV binding with a new configuration."""
+        self.ttv = self.ttv_binding_manager.create_binding(
+            binding_name=binding_name,
+            **(config or {})
+        )
+        if self.ttv is None:
+            available = self.ttv_binding_manager.get_available_bindings()
+            raise ValueError(f"Failed to update TTV binding: {binding_name}. Available: {available}")
+
+    def update_ttm_binding(self, binding_name: str, config: Optional[Dict[str, Any]] = None):
+        """Update the TTM binding with a new configuration."""
+        self.ttm = self.ttm_binding_manager.create_binding(
+            binding_name=binding_name,
+            **(config or {})
+        )
+        if self.ttm is None:
+            available = self.ttm_binding_manager.get_available_bindings()
+            raise ValueError(f"Failed to update TTM binding: {binding_name}. Available: {available}")
+
+    def update_mcp_binding(self, binding_name: str, config: Optional[Dict[str, Any]] = None):
+        """Update the MCP binding with a new configuration."""
+        self.mcp = self.mcp_binding_manager.create_binding(
+            binding_name=binding_name,
+            **(config or {})
+        )
+        if self.mcp is None:
+            available = self.mcp_binding_manager.get_available_bindings()
+            raise ValueError(f"Failed to update MCP binding: {binding_name}. Available: {available}")

    # --- Prompt Formatting Properties ---
    @property
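
All seven new `update_*_binding` helpers follow one pattern: rebuild the binding through its manager, then raise `ValueError` listing the available bindings if creation failed. A hedged usage sketch, assuming a local Ollama server and the stock bindings that ship with the package:

    lc = LollmsClient("ollama", model_name="mistral-nemo:latest")

    # Rebuild the LLM binding in place; host address, service key, SSL
    # settings and the current model name are carried over by the helper.
    lc.update_llm_binding("ollama")

    # Unknown names surface the list of available bindings.
    try:
        lc.update_llm_binding("no_such_binding")
    except ValueError as e:
        print(e)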
@@ -777,10 +854,17 @@ Don't forget encapsulate the code inside a html code tag. This is mandatory.
            f'"{conversation_context}"'
        )
        initial_plan_gen = self.generate_text(prompt=obj_prompt, system_prompt=objective_extraction_system_prompt, temperature=0.0, stream=False)
+        if type(initial_plan_gen)!=str:
+            if "error" in initial_plan_gen:
+                ASCIIColors.error(initial_plan_gen["error"])
+                raise Exception(initial_plan_gen["error"])
+            else:
+                raise Exception("generate text failed. Make sure you are connected to the binding server if you are using remote one")
        current_plan = self.remove_thinking_blocks(initial_plan_gen).strip()

        if streaming_callback:
-            streaming_callback(
+            streaming_callback("Building initial plan...", MSG_TYPE.MSG_TYPE_STEP_END, {"id": "plan_extraction"}, turn_history)
+            streaming_callback(f"Current plan:\n{current_plan}", MSG_TYPE.MSG_TYPE_STEP, {"id": "plan"}, turn_history)
        turn_history.append({"type": "initial_plan", "content": current_plan})

        tool_calls_made_this_turn = []