lollms-client 0.21.0__py3-none-any.whl → 0.23.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of lollms-client might be problematic.

@@ -4,15 +4,18 @@ import base64
 import os
 import uuid
 import shutil
+import re
 from collections import defaultdict
 from datetime import datetime
 from typing import List, Dict, Optional, Union, Any, Type, Callable
 from pathlib import Path
+from types import SimpleNamespace

 from sqlalchemy import (create_engine, Column, String, Text, Integer, DateTime,
-                        ForeignKey, JSON, Boolean, LargeBinary, Index)
-from sqlalchemy.orm import sessionmaker, relationship, Session, declarative_base
+                        ForeignKey, JSON, Boolean, LargeBinary, Index, Float)
+from sqlalchemy.orm import sessionmaker, relationship, Session, declarative_base, declared_attr
 from sqlalchemy.types import TypeDecorator
+from sqlalchemy.orm.exc import NoResultFound

 try:
     from cryptography.fernet import Fernet, InvalidToken
@@ -23,17 +26,21 @@ try:
 except ImportError:
     ENCRYPTION_AVAILABLE = False

+from lollms_client.lollms_types import MSG_TYPE
+# Type hint placeholders for classes defined externally
 if False:
     from lollms_client import LollmsClient
-    from lollms_client.lollms_types import MSG_TYPE
+    from lollms_personality import LollmsPersonality

 class EncryptedString(TypeDecorator):
+    """A SQLAlchemy TypeDecorator for field-level database encryption."""
     impl = LargeBinary
     cache_ok = True

     def __init__(self, key: str, *args, **kwargs):
         super().__init__(*args, **kwargs)
-        if not ENCRYPTION_AVAILABLE: raise ImportError("'cryptography' is required for DB encryption.")
+        if not ENCRYPTION_AVAILABLE:
+            raise ImportError("'cryptography' is required for DB encryption.")
         self.salt = b'lollms-fixed-salt-for-db-encryption'
         kdf = PBKDF2HMAC(
             algorithm=hashes.SHA256(), length=32, salt=self.salt,
@@ -43,21 +50,24 @@ class EncryptedString(TypeDecorator):
         self.fernet = Fernet(derived_key)

     def process_bind_param(self, value: Optional[str], dialect) -> Optional[bytes]:
-        if value is None: return None
+        if value is None:
+            return None
         return self.fernet.encrypt(value.encode('utf-8'))

     def process_result_value(self, value: Optional[bytes], dialect) -> Optional[str]:
-        if value is None: return None
+        if value is None:
+            return None
         try:
             return self.fernet.decrypt(value).decode('utf-8')
         except InvalidToken:
             return "<DECRYPTION_FAILED: Invalid Key or Corrupt Data>"

 def create_dynamic_models(discussion_mixin: Optional[Type] = None, message_mixin: Optional[Type] = None, encryption_key: Optional[str] = None):
+    """Factory to dynamically create SQLAlchemy ORM models with custom mixins."""
     Base = declarative_base()
     EncryptedText = EncryptedString(encryption_key) if encryption_key else Text

-    class DiscussionBase(Base):
+    class DiscussionBase:
         __abstract__ = True
         id = Column(String, primary_key=True, default=lambda: str(uuid.uuid4()))
         system_prompt = Column(EncryptedText, nullable=True)
@@ -66,49 +76,50 @@ def create_dynamic_models(discussion_mixin: Optional[Type] = None, message_mixin
         discussion_metadata = Column(JSON, nullable=True, default=dict)
         created_at = Column(DateTime, default=datetime.utcnow)
         updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)
+
+        @declared_attr
+        def messages(cls):
+            return relationship("Message", back_populates="discussion", cascade="all, delete-orphan", lazy="joined")

-    class MessageBase(Base):
+    class MessageBase:
         __abstract__ = True
         id = Column(String, primary_key=True, default=lambda: str(uuid.uuid4()))
-        discussion_id = Column(String, ForeignKey('discussions.id'), nullable=False)
-        parent_id = Column(String, ForeignKey('messages.id'), nullable=True)
+        discussion_id = Column(String, ForeignKey('discussions.id'), nullable=False, index=True)
+        parent_id = Column(String, ForeignKey('messages.id'), nullable=True, index=True)
         sender = Column(String, nullable=False)
         sender_type = Column(String, nullable=False)
+
+        raw_content = Column(EncryptedText, nullable=True)
+        thoughts = Column(EncryptedText, nullable=True)
         content = Column(EncryptedText, nullable=False)
+        scratchpad = Column(EncryptedText, nullable=True)
+
+        tokens = Column(Integer, nullable=True)
+        binding_name = Column(String, nullable=True)
+        model_name = Column(String, nullable=True)
+        generation_speed = Column(Float, nullable=True)
+
         message_metadata = Column(JSON, nullable=True, default=dict)
         images = Column(JSON, nullable=True, default=list)
         created_at = Column(DateTime, default=datetime.utcnow)
-
-    discussion_attrs = {'__tablename__': 'discussions'}
-    if hasattr(discussion_mixin, '__table_args__'):
-        discussion_attrs['__table_args__'] = discussion_mixin.__table_args__
-    if discussion_mixin:
-        for attr, col in discussion_mixin.__dict__.items():
-            if isinstance(col, Column):
-                discussion_attrs[attr] = col
-
-    message_attrs = {'__tablename__': 'messages'}
-    if hasattr(message_mixin, '__table_args__'):
-        message_attrs['__table_args__'] = message_mixin.__table_args__
-    if message_mixin:
-        for attr, col in message_mixin.__dict__.items():
-            if isinstance(col, Column):
-                message_attrs[attr] = col
-
-    discussion_bases = (discussion_mixin, DiscussionBase) if discussion_mixin else (DiscussionBase,)
-    DynamicDiscussion = type('Discussion', discussion_bases, discussion_attrs)
+
+        @declared_attr
+        def discussion(cls):
+            return relationship("Discussion", back_populates="messages")
+
+    discussion_bases = (discussion_mixin, DiscussionBase, Base) if discussion_mixin else (DiscussionBase, Base)
+    DynamicDiscussion = type('Discussion', discussion_bases, {'__tablename__': 'discussions'})

-    message_bases = (message_mixin, MessageBase) if message_mixin else (MessageBase,)
-    DynamicMessage = type('Message', message_bases, message_attrs)
+    message_bases = (message_mixin, MessageBase, Base) if message_mixin else (MessageBase, Base)
+    DynamicMessage = type('Message', message_bases, {'__tablename__': 'messages'})

-    DynamicDiscussion.messages = relationship(DynamicMessage, back_populates="discussion", cascade="all, delete-orphan", lazy="joined")
-    DynamicMessage.discussion = relationship(DynamicDiscussion, back_populates="messages")
-
     return Base, DynamicDiscussion, DynamicMessage

-class DatabaseManager:
+class LollmsDataManager:
+    """Manages database connection, session, and table creation."""
     def __init__(self, db_path: str, discussion_mixin: Optional[Type] = None, message_mixin: Optional[Type] = None, encryption_key: Optional[str] = None):
-        if not db_path: raise ValueError("Database path cannot be empty.")
+        if not db_path:
+            raise ValueError("Database path cannot be empty.")
         self.Base, self.DiscussionModel, self.MessageModel = create_dynamic_models(
             discussion_mixin, message_mixin, encryption_key
         )
@@ -121,513 +132,579 @@ class DatabaseManager:

     def get_session(self) -> Session:
         return self.SessionLocal()
-
+
     def list_discussions(self) -> List[Dict]:
-        session = self.get_session()
-        discussions = session.query(self.DiscussionModel).all()
-        session.close()
-        discussion_list = []
-        for disc in discussions:
-            disc_dict = {c.name: getattr(disc, c.name) for c in disc.__table__.columns}
-            discussion_list.append(disc_dict)
-        return discussion_list
+        with self.get_session() as session:
+            discussions = session.query(self.DiscussionModel).all()
+            return [{c.name: getattr(disc, c.name) for c in disc.__table__.columns} for disc in discussions]

     def get_discussion(self, lollms_client: 'LollmsClient', discussion_id: str, **kwargs) -> Optional['LollmsDiscussion']:
-        session = self.get_session()
-        db_disc = session.query(self.DiscussionModel).filter_by(id=discussion_id).first()
-        session.close()
-        if db_disc:
-            return LollmsDiscussion(lollmsClient=lollms_client, discussion_id=discussion_id, db_manager=self, **kwargs)
-        return None
+        with self.get_session() as session:
+            try:
+                db_disc = session.query(self.DiscussionModel).filter_by(id=discussion_id).one()
+                session.expunge(db_disc)
+                return LollmsDiscussion(lollmsClient=lollms_client, db_manager=self, db_discussion_obj=db_disc, **kwargs)
+            except NoResultFound:
+                return None

     def search_discussions(self, **criteria) -> List[Dict]:
-        session = self.get_session()
-        query = session.query(self.DiscussionModel)
-        for key, value in criteria.items():
-            query = query.filter(getattr(self.DiscussionModel, key).ilike(f"%{value}%"))
-        discussions = query.all()
-        session.close()
-        discussion_list = []
-        for disc in discussions:
-            disc_dict = {c.name: getattr(disc, c.name) for c in disc.__table__.columns}
-            discussion_list.append(disc_dict)
-        return discussion_list
+        with self.get_session() as session:
+            query = session.query(self.DiscussionModel)
+            for key, value in criteria.items():
+                if hasattr(self.DiscussionModel, key):
+                    query = query.filter(getattr(self.DiscussionModel, key).ilike(f"%{value}%"))
+            discussions = query.all()
+            return [{c.name: getattr(disc, c.name) for c in disc.__table__.columns} for disc in discussions]

     def delete_discussion(self, discussion_id: str):
-        session = self.get_session()
-        db_disc = session.query(self.DiscussionModel).filter_by(id=discussion_id).first()
-        if db_disc:
-            session.delete(db_disc)
-            session.commit()
-        session.close()
+        with self.get_session() as session:
+            db_disc = session.query(self.DiscussionModel).filter_by(id=discussion_id).first()
+            if db_disc:
+                session.delete(db_disc)
+                session.commit()
+
+class LollmsMessage:
+    """A wrapper for a message ORM object, providing direct attribute access."""
+    def __init__(self, discussion: 'LollmsDiscussion', db_message: Any):
+        object.__setattr__(self, '_discussion', discussion)
+        object.__setattr__(self, '_db_message', db_message)
+
+    def __getattr__(self, name: str) -> Any:
+        if name == 'metadata':
+            return getattr(self._db_message, 'message_metadata', None)
+        return getattr(self._db_message, name)
+
+    def __setattr__(self, name: str, value: Any):
+        if name == 'metadata':
+            setattr(self._db_message, 'message_metadata', value)
+        else:
+            setattr(self._db_message, name, value)
+        self._discussion.touch()
+
+    def __repr__(self) -> str:
+        return f"<LollmsMessage id={self.id} sender='{self.sender}'>"

 class LollmsDiscussion:
-    def __init__(self, lollmsClient: 'LollmsClient', discussion_id: Optional[str] = None, db_manager: Optional[DatabaseManager] = None, autosave: bool = False, max_context_size: Optional[int] = None):
-        self.lollmsClient = lollmsClient
-        self.db_manager = db_manager
-        self.autosave = autosave
-        self.max_context_size = max_context_size
-        self._is_db_backed = db_manager is not None
+    """Represents and manages a single discussion, acting as a high-level interface."""
+    def __init__(self, lollmsClient: 'LollmsClient', db_manager: Optional[LollmsDataManager] = None,
+                 discussion_id: Optional[str] = None, db_discussion_obj: Optional[Any] = None,
+                 autosave: bool = False, max_context_size: Optional[int] = None):

-        self.session = None
-        self.db_discussion = None
-        self._messages_to_delete = []
-
-        self._reset_in_memory_state()
+        object.__setattr__(self, 'lollmsClient', lollmsClient)
+        object.__setattr__(self, 'db_manager', db_manager)
+        object.__setattr__(self, 'autosave', autosave)
+        object.__setattr__(self, 'max_context_size', max_context_size)
+        object.__setattr__(self, 'scratchpad', "")
+        object.__setattr__(self, 'show_thoughts', False)
+        object.__setattr__(self, 'include_thoughts_in_context', False)
+        object.__setattr__(self, 'thought_placeholder', "<thought process hidden>")
+
+        object.__setattr__(self, '_session', None)
+        object.__setattr__(self, '_db_discussion', None)
+        object.__setattr__(self, '_message_index', None)
+        object.__setattr__(self, '_messages_to_delete_from_db', set())
+        object.__setattr__(self, '_is_db_backed', db_manager is not None)

         if self._is_db_backed:
-            if not discussion_id: raise ValueError("A discussion_id is required for database-backed discussions.")
-            self.session = db_manager.get_session()
-            self._load_from_db(discussion_id)
-        else:
-            self.id = discussion_id or str(uuid.uuid4())
-            self.created_at = datetime.utcnow()
-            self.updated_at = self.created_at
-
-    def _reset_in_memory_state(self):
-        self.id: str = ""
-        self.system_prompt: Optional[str] = None
-        self.participants: Dict[str, str] = {}
-        self.active_branch_id: Optional[str] = None
-        self.metadata: Dict[str, Any] = {}
-        self.scratchpad: str = ""
-        self.messages: List[Dict] = []
-        self.message_index: Dict[str, Dict] = {}
-        self.created_at: Optional[datetime] = None
-        self.updated_at: Optional[datetime] = None
-
-    def _load_from_db(self, discussion_id: str):
-        self.db_discussion = self.session.query(self.db_manager.DiscussionModel).filter(self.db_manager.DiscussionModel.id == discussion_id).one()
-
-        self.id = self.db_discussion.id
-        self.system_prompt = self.db_discussion.system_prompt
-        self.participants = self.db_discussion.participants or {}
-        self.active_branch_id = self.db_discussion.active_branch_id
-        self.metadata = self.db_discussion.discussion_metadata or {}
-
-        self.messages = []
-        self.message_index = {}
-        for msg in self.db_discussion.messages:
-            msg_dict = {c.name: getattr(msg, c.name) for c in msg.__table__.columns}
-            if 'message_metadata' in msg_dict:
-                msg_dict['metadata'] = msg_dict.pop('message_metadata')
-            self.messages.append(msg_dict)
-            self.message_index[msg.id] = msg_dict
+            if not db_discussion_obj and not discussion_id:
+                raise ValueError("Either discussion_id or db_discussion_obj must be provided for DB-backed discussions.")

-    def commit(self):
-        if not self._is_db_backed or not self.session: return
-
-        if self.db_discussion:
-            self.db_discussion.system_prompt = self.system_prompt
-            self.db_discussion.participants = self.participants
-            self.db_discussion.active_branch_id = self.active_branch_id
-            self.db_discussion.discussion_metadata = self.metadata
-            self.db_discussion.updated_at = datetime.utcnow()
-
-        for msg_id in self._messages_to_delete:
-            msg_to_del = self.session.query(self.db_manager.MessageModel).filter_by(id=msg_id).first()
-            if msg_to_del: self.session.delete(msg_to_del)
-        self._messages_to_delete.clear()
-
-        for msg_data in self.messages:
-            msg_id = msg_data['id']
-            msg_orm = self.session.query(self.db_manager.MessageModel).filter_by(id=msg_id).first()
-
-            if 'metadata' in msg_data:
-                msg_data['message_metadata'] = msg_data.pop('metadata',None)
-
-            if not msg_orm:
-                msg_data_copy = msg_data.copy()
-                valid_keys = {c.name for c in self.db_manager.MessageModel.__table__.columns}
-                filtered_msg_data = {k: v for k, v in msg_data_copy.items() if k in valid_keys}
-                msg_orm = self.db_manager.MessageModel(**filtered_msg_data)
-                self.session.add(msg_orm)
+            self._session = db_manager.get_session()
+            if db_discussion_obj:
+                self._db_discussion = self._session.merge(db_discussion_obj)
             else:
-                for key, value in msg_data.items():
-                    if hasattr(msg_orm, key):
-                        setattr(msg_orm, key, value)
-
-        self.session.commit()
-
-    def touch(self):
-        self.updated_at = datetime.utcnow()
-        if self._is_db_backed and self.autosave:
-            self.commit()
+                try:
+                    self._db_discussion = self._session.query(db_manager.DiscussionModel).filter_by(id=discussion_id).one()
+                except NoResultFound:
+                    self._session.close()
+                    raise ValueError(f"No discussion found with ID: {discussion_id}")
+        else:
+            self._create_in_memory_proxy(id=discussion_id)
+        self._rebuild_message_index()
+
+    @property
+    def remaining_tokens(self) -> Optional[int]:
+        """Calculates the remaining tokens available in the context window."""
+        binding = self.lollmsClient.binding
+        if not binding or not hasattr(binding, 'ctx_size') or not binding.ctx_size:
+            return None
+        max_ctx = binding.ctx_size
+        current_prompt = self.format_discussion(max_ctx)
+        current_tokens = self.lollmsClient.count_tokens(current_prompt)
+        return max_ctx - current_tokens

     @classmethod
-    def create_new(cls, lollms_client: 'LollmsClient', db_manager: Optional[DatabaseManager] = None, **kwargs) -> 'LollmsDiscussion':
+    def create_new(cls, lollms_client: 'LollmsClient', db_manager: Optional[LollmsDataManager] = None, **kwargs) -> 'LollmsDiscussion':
         init_args = {
             'autosave': kwargs.pop('autosave', False),
             'max_context_size': kwargs.pop('max_context_size', None)
         }
-
         if db_manager:
-            session = db_manager.get_session()
-            valid_keys = db_manager.DiscussionModel.__table__.columns.keys()
-            db_creation_args = {k: v for k, v in kwargs.items() if k in valid_keys}
-            db_discussion = db_manager.DiscussionModel(**db_creation_args)
-            session.add(db_discussion)
-            session.commit()
-            return cls(lollmsClient=lollms_client, discussion_id=db_discussion.id, db_manager=db_manager, **init_args)
+            with db_manager.get_session() as session:
+                valid_keys = db_manager.DiscussionModel.__table__.columns.keys()
+                db_creation_args = {k: v for k, v in kwargs.items() if k in valid_keys}
+                db_discussion_orm = db_manager.DiscussionModel(**db_creation_args)
+                session.add(db_discussion_orm)
+                session.commit()
+                session.expunge(db_discussion_orm)
+                return cls(lollmsClient=lollms_client, db_manager=db_manager, db_discussion_obj=db_discussion_orm, **init_args)
         else:
-            discussion_id = kwargs.get('discussion_id')
-            return cls(lollmsClient=lollms_client, discussion_id=discussion_id, **init_args)
+            return cls(lollmsClient=lollms_client, discussion_id=kwargs.get('id'), **init_args)
+
+    def __getattr__(self, name: str) -> Any:
+        if name == 'metadata':
+            return getattr(self._db_discussion, 'discussion_metadata', None)
+        if name == 'messages':
+            return [LollmsMessage(self, msg) for msg in self._db_discussion.messages]
+        return getattr(self._db_discussion, name)
+
+    def __setattr__(self, name: str, value: Any):
+        internal_attrs = [
+            'lollmsClient','db_manager','autosave','max_context_size','scratchpad',
+            'show_thoughts', 'include_thoughts_in_context', 'thought_placeholder',
+            '_session','_db_discussion','_message_index','_messages_to_delete_from_db', '_is_db_backed'
+        ]
+        if name in internal_attrs:
+            object.__setattr__(self, name, value)
+        else:
+            if name == 'metadata':
+                setattr(self._db_discussion, 'discussion_metadata', value)
+            else:
+                setattr(self._db_discussion, name, value)
+            self.touch()
+
+    def _create_in_memory_proxy(self, id: Optional[str] = None):
+        proxy = SimpleNamespace()
+        proxy.id, proxy.system_prompt, proxy.participants = id or str(uuid.uuid4()), None, {}
+        proxy.active_branch_id, proxy.discussion_metadata = None, {}
+        proxy.created_at, proxy.updated_at = datetime.utcnow(), datetime.utcnow()
+        proxy.messages = []
+        object.__setattr__(self, '_db_discussion', proxy)
+
+    def _rebuild_message_index(self):
+        if self._is_db_backed and self._session.is_active and self._db_discussion in self._session:
+            self._session.refresh(self._db_discussion, ['messages'])
+        self._message_index = {msg.id: msg for msg in self._db_discussion.messages}

-    def set_system_prompt(self, prompt: str):
-        self.system_prompt = prompt
-        self.touch()
+    def touch(self):
+        setattr(self._db_discussion, 'updated_at', datetime.utcnow())
+        if self._is_db_backed and self.autosave:
+            self.commit()

-    def set_participants(self, participants: Dict[str, str]):
-        for name, role in participants.items():
-            if role not in ["user", "assistant", "system"]:
-                raise ValueError(f"Invalid role '{role}' for participant '{name}'")
-        self.participants = participants
-        self.touch()
+    def commit(self):
+        if not self._is_db_backed or not self._session:
+            return
+        if self._messages_to_delete_from_db:
+            for msg_id in self._messages_to_delete_from_db:
+                msg_to_del = self._session.get(self.db_manager.MessageModel, msg_id)
+                if msg_to_del:
+                    self._session.delete(msg_to_del)
+            self._messages_to_delete_from_db.clear()
+        try:
+            self._session.commit()
+            self._rebuild_message_index()
+        except Exception as e:
+            self._session.rollback()
+            raise e

-    def add_message(self, **kwargs) -> Dict:
-        msg_id = kwargs.get('id', str(uuid.uuid4()))
-        parent_id = kwargs.get('parent_id', self.active_branch_id or None)
-
-        message_data = {
-            'id': msg_id, 'parent_id': parent_id,
-            'discussion_id': self.id, 'created_at': datetime.utcnow(),
-            **kwargs
-        }
-
-        self.messages.append(message_data)
-        self.message_index[msg_id] = message_data
-        self.active_branch_id = msg_id
+    def close(self):
+        if self._session:
+            self.commit()
+            self._session.close()
+
+    def add_message(self, **kwargs) -> LollmsMessage:
+        msg_id, parent_id = kwargs.get('id', str(uuid.uuid4())), kwargs.get('parent_id', self.active_branch_id)
+        message_data = {'id': msg_id, 'parent_id': parent_id, 'discussion_id': self.id, 'created_at': datetime.utcnow(), **kwargs}
+        if 'metadata' in message_data:
+            message_data['message_metadata'] = message_data.pop('metadata')
+        if self._is_db_backed:
+            valid_keys = {c.name for c in self.db_manager.MessageModel.__table__.columns}
+            filtered_data = {k: v for k, v in message_data.items() if k in valid_keys}
+            new_msg_orm = self.db_manager.MessageModel(**filtered_data)
+            self._db_discussion.messages.append(new_msg_orm)
+            if new_msg_orm not in self._session:
+                self._session.add(new_msg_orm)
+        else:
+            new_msg_orm = SimpleNamespace(**message_data)
+            self._db_discussion.messages.append(new_msg_orm)
+        self._message_index[msg_id], self.active_branch_id = new_msg_orm, msg_id
         self.touch()
-        return message_data
-
-    def get_branch(self, leaf_id: Optional[str]) -> List[Dict]:
-        if not leaf_id: return []
-        branch = []
-        current_id: Optional[str] = leaf_id
-        while current_id and current_id in self.message_index:
-            msg = self.message_index[current_id]
-            branch.append(msg)
-            current_id = msg.get('parent_id')
-        return list(reversed(branch))
+        return LollmsMessage(self, new_msg_orm)

-    def chat(self, user_message: str, show_thoughts: bool = False, **kwargs) -> Dict:
+    def get_branch(self, leaf_id: Optional[str]) -> List[LollmsMessage]:
+        if not leaf_id:
+            return []
+        branch_orms, current_id = [], leaf_id
+        while current_id and current_id in self._message_index:
+            msg_orm = self._message_index[current_id]
+            branch_orms.append(msg_orm)
+            current_id = msg_orm.parent_id
+        return [LollmsMessage(self, orm) for orm in reversed(branch_orms)]
+
+
+
+    def chat(
+        self,
+        user_message: str,
+        personality: Optional['LollmsPersonality'] = None,
+        use_mcps: Union[None, bool, List[str]] = None,
+        use_data_store: Union[None, Dict[str, Callable]] = None,
+        build_plan: bool = True,
+        add_user_message: bool = True, # New parameter
+        max_tool_calls = 10,
+        rag_top_k = 5,
+        **kwargs
+    ) -> Dict[str, 'LollmsMessage']: # Return type changed
+        """
+        Main interaction method for the discussion. It can perform a simple chat or
+        trigger a complex agentic loop with RAG and MCP tool use.
+
+        Args:
+            user_message (str): The new message from the user.
+            personality (Optional[LollmsPersonality], optional): The personality to use. Defaults to None.
+            use_mcps (Union[None, bool, List[str]], optional): Controls MCP tool usage. Defaults to None.
+            use_data_store (Union[None, Dict[str, Callable]], optional): Controls RAG usage. Defaults to None.
+            build_plan (bool, optional): If True, the agent will generate an initial plan. Defaults to True.
+            add_user_message (bool, optional): If True, a new user message is created from the prompt.
+                If False, it assumes regeneration on the current active user message. Defaults to True.
+            **kwargs: Additional keyword arguments passed to the underlying generation method.
+
+        Returns:
+            Dict[str, LollmsMessage]: A dictionary with 'user_message' and 'ai_message' objects.
+        """
         if self.max_context_size is not None:
             self.summarize_and_prune(self.max_context_size)
-
-        if user_message:
-            self.add_message(sender="user", sender_type="user", content=user_message)

-        from lollms_client.lollms_types import MSG_TYPE
+        # Add user message to the discussion or get the existing one
+        if add_user_message:
+            # Pass kwargs to capture images, etc., sent from the router
+            user_msg = self.add_message(sender="user", sender_type="user", content=user_message, **kwargs)
+        else:
+            # We are regenerating. The current active branch tip must be the user message.
+            if self.active_branch_id not in self._message_index:
+                raise ValueError("Regeneration failed: active branch tip not found or is invalid.")
+            user_msg_orm = self._message_index[self.active_branch_id]
+            if user_msg_orm.sender_type != 'user':
+                raise ValueError(f"Regeneration failed: active branch tip is a '{user_msg_orm.sender_type}' message, not 'user'.")
+            user_msg = LollmsMessage(self, user_msg_orm)

-        is_streaming = "streaming_callback" in kwargs and kwargs["streaming_callback"] is not None
-
-        if is_streaming:
-            full_response_parts = []
-            token_buffer = ""
-            in_thought_block = False
-            original_callback = kwargs.get("streaming_callback")
-
-            def accumulating_callback(token: str, msg_type: MSG_TYPE = MSG_TYPE.MSG_TYPE_CHUNK):
-                nonlocal token_buffer, in_thought_block
-                continue_streaming = True
-
-                if token: token_buffer += token
-
-                while True:
-                    if in_thought_block:
-                        end_tag_pos = token_buffer.find("</think>")
-                        if end_tag_pos != -1:
-                            thought_chunk = token_buffer[:end_tag_pos]
-                            if show_thoughts and original_callback and thought_chunk:
-                                if not original_callback(thought_chunk, MSG_TYPE.MSG_TYPE_THOUGHT_CHUNK): continue_streaming = False
-                            in_thought_block = False
-                            token_buffer = token_buffer[end_tag_pos + len("</think>"):]
-                        else:
-                            if show_thoughts and original_callback and token_buffer:
-                                if not original_callback(token_buffer, MSG_TYPE.MSG_TYPE_THOUGHT_CHUNK): continue_streaming = False
-                            token_buffer = ""
-                            break
-                    else:
-                        start_tag_pos = token_buffer.find("<think>")
-                        if start_tag_pos != -1:
-                            response_chunk = token_buffer[:start_tag_pos]
-                            if response_chunk:
-                                full_response_parts.append(response_chunk)
-                                if original_callback:
-                                    if not original_callback(response_chunk, MSG_TYPE.MSG_TYPE_CHUNK): continue_streaming = False
-                            in_thought_block = True
-                            token_buffer = token_buffer[start_tag_pos + len("<think>"):]
-                        else:
-                            if token_buffer:
-                                full_response_parts.append(token_buffer)
-                                if original_callback:
-                                    if not original_callback(token_buffer, MSG_TYPE.MSG_TYPE_CHUNK): continue_streaming = False
-                            token_buffer = ""
-                            break
-                return continue_streaming
-
-            kwargs["streaming_callback"] = accumulating_callback
-            kwargs["stream"] = True
+        # --- (The existing generation logic remains the same) ---
+        is_agentic_turn = (use_mcps is not None and len(use_mcps)>0) or (use_data_store is not None and len(use_data_store)>0)
+        rag_context = None
+        original_system_prompt = self.system_prompt
+        if personality:
+            self.system_prompt = personality.system_prompt
+            if user_message and not is_agentic_turn:
+                rag_context = personality.get_rag_context(user_message)
+                if rag_context:
+                    self.system_prompt = f"{original_system_prompt or ''}\n\n--- Relevant Information ---\n{rag_context}\n---"
+        start_time = datetime.now()
+        if is_agentic_turn:
+            # --- FIX: Provide the full conversation context to the agent ---
+            # 1. Get the model's max context size.
+            max_ctx = self.lollmsClient.binding.get_ctx_size(self.lollmsClient.binding.model_name) if self.lollmsClient.binding else None

-            self.lollmsClient.chat(self, **kwargs)
-            ai_response = "".join(full_response_parts)
+            # 2. Format the entire discussion up to this point, including the new user message.
+            #    This ensures the agent has the full history.
+            full_context_prompt = self.format_discussion(max_allowed_tokens=max_ctx)
+
+            # 3. Call the agent with the complete context.
+            #    We pass the full context to the 'prompt' argument. The `system_prompt` is already
+            #    included within the formatted text, so we don't pass it separately to avoid duplication.
+            agent_result = self.lollmsClient.generate_with_mcp_rag(
+                prompt=full_context_prompt,
+                use_mcps=use_mcps,
+                use_data_store=use_data_store,
+                build_plan=build_plan,
+                max_tool_calls = max_tool_calls,
+                rag_top_k= rag_top_k,
+                **kwargs
+            )
+            final_content = agent_result.get("final_answer", "")
+            thoughts_text = None
+            final_raw_response = json.dumps(agent_result)
         else:
-            kwargs["stream"] = False
-            raw_response = self.lollmsClient.chat(self, **kwargs)
-            ai_response = self.lollmsClient.remove_thinking_blocks(raw_response) if raw_response else ""
-
-        ai_message_obj = self.add_message(sender="assistant", sender_type="assistant", content=ai_response)
-
-        if self._is_db_backed and not self.autosave:
+            if personality and personality.script_module and hasattr(personality.script_module, 'run'):
+                try:
+                    final_raw_response = personality.script_module.run(self, kwargs.get("streaming_callback"))
+                except Exception as e:
+                    final_raw_response = f"Error executing personality script: {e}"
+            else:
+                is_streaming = "streaming_callback" in kwargs and kwargs.get("streaming_callback") is not None
+                if is_streaming:
+                    raw_response_accumulator = self.lollmsClient.chat(self, **kwargs)
+                    final_raw_response = "".join(raw_response_accumulator)
+                else:
+                    kwargs["stream"] = False
+                    final_raw_response = self.lollmsClient.chat(self, **kwargs) or ""
+            thoughts_match = re.search(r"<think>(.*?)</think>", final_raw_response, re.DOTALL)
+            thoughts_text = thoughts_match.group(1).strip() if thoughts_match else None
+            final_content = self.lollmsClient.remove_thinking_blocks(final_raw_response)
+        if rag_context or (personality and self.system_prompt != original_system_prompt):
+            self.system_prompt = original_system_prompt
+        end_time = datetime.now()
+        duration = (end_time - start_time).total_seconds()
+        token_count = self.lollmsClient.count_tokens(final_content)
+        tok_per_sec = (token_count / duration) if duration > 0 else 0
+        # --- (End of existing logic) ---
+
+        # --- FIX: Store agentic results in metadata ---
+        message_meta = {}
+        if is_agentic_turn and isinstance(agent_result, dict):
+            # We store the 'steps' and 'sources' if they exist in the agent result.
+            # This makes them available to the frontend in the final message object.
+            if "steps" in agent_result:
+                message_meta["steps"] = agent_result["steps"]
+            if "sources" in agent_result:
+                message_meta["sources"] = agent_result["sources"]
+
+        ai_message_obj = self.add_message(
+            sender=personality.name if personality else "assistant", sender_type="assistant", content=final_content,
+            raw_content=final_raw_response, thoughts=thoughts_text, tokens=token_count,
+            binding_name=self.lollmsClient.binding.binding_name, model_name=self.lollmsClient.binding.model_name,
+            generation_speed=tok_per_sec,
+            parent_id=user_msg.id, # Ensure the AI response is a child of the user message
+            metadata=message_meta # Pass the collected metadata here
+        )
+        if self._is_db_backed and self.autosave:
             self.commit()

-        return ai_message_obj
+        return {"user_message": user_msg, "ai_message": ai_message_obj}

-    def regenerate_branch(self, show_thoughts: bool = False, **kwargs) -> Dict:
-        last_message = self.message_index.get(self.active_branch_id)
-        if not last_message or last_message['sender_type'] != 'assistant':
-            raise ValueError("Can only regenerate from an assistant's message.")
+    def regenerate_branch(self, **kwargs) -> Dict[str, 'LollmsMessage']:
+        if not self.active_branch_id or self.active_branch_id not in self._message_index:
+            raise ValueError("No active message to regenerate from.")

-        parent_id = last_message['parent_id']
-        self.active_branch_id = parent_id
+        last_message_orm = self._message_index[self.active_branch_id]

-        self.messages = [m for m in self.messages if m['id'] != last_message['id']]
-        self._messages_to_delete.append(last_message['id'])
-        self._rebuild_in_memory_indexes()
+        # If the current active message is the assistant's, we need to delete it
+        # and set the active branch to its parent (the user message).
+        if last_message_orm.sender_type == 'assistant':
+            parent_id = last_message_orm.parent_id
+            if not parent_id:
+                raise ValueError("Cannot regenerate from an assistant message with no parent.")
+
+            last_message_id = last_message_orm.id
+            self._db_discussion.messages.remove(last_message_orm)
+            del self._message_index[last_message_id]
+            if self._is_db_backed:
+                self._messages_to_delete_from_db.add(last_message_id)
+
+            self.active_branch_id = parent_id
+            self.touch()
+
+        # The active branch is now guaranteed to be on a user message.
+        # Call chat, but do not add a new user message.
+        prompt_to_regenerate = self._message_index[self.active_branch_id].content
+        return self.chat(user_message=prompt_to_regenerate, add_user_message=False, **kwargs)
+
+    def process_and_summarize(self, large_text: str, user_prompt: str, chunk_size: int = 4096, **kwargs) -> LollmsMessage:
+        user_msg = self.add_message(sender="user", sender_type="user", content=user_prompt)
+        chunks = [large_text[i:i + chunk_size] for i in range(0, len(large_text), chunk_size)]
+        current_summary, total_chunks = "", len(chunks)
+        for i, chunk in enumerate(chunks):
+            print(f"\nProcessing chunk {i+1}/{total_chunks}...")
+            if i == 0:
+                prompt = f"""The user wants to know: "{user_prompt}"\nHere is the first part of the document (chunk 1 of {total_chunks}). \nRead it and create a detailed summary of all information relevant to the user's prompt.\n\nDOCUMENT CHUNK:\n---\n{chunk}\n---\nSUMMARY:"""
+            else:
+                prompt = f"""The user wants to know: "{user_prompt}"\nYou are processing a large document sequentially. Here is the summary of the previous chunks and the content of the next chunk ({i+1} of {total_chunks}).\nUpdate your summary by integrating new relevant information from the new chunk. Do not repeat information you already have. Output ONLY the new, updated, complete summary.\n\nPREVIOUS SUMMARY:\n---\n{current_summary}\n---\n\nNEW DOCUMENT CHUNK:\n---\n{chunk}\n---\nUPDATED SUMMARY:"""
+            current_summary = self.lollmsClient.generate_text(prompt, **kwargs).strip()
+        final_prompt = f"""Based on the following comprehensive summary of a document, provide a final answer to the user's original prompt.\nUser's prompt: "{user_prompt}"\n\nCOMPREHENSIVE SUMMARY:\n---\n{current_summary}\n---\nFINAL ANSWER:"""
+        final_answer = self.lollmsClient.generate_text(final_prompt, **kwargs).strip()
+        ai_message_obj = self.add_message(
+            sender="assistant", sender_type="assistant", content=final_answer,
+            scratchpad=current_summary, parent_id=user_msg.id
+        )
+        if self._is_db_backed and not self.autosave:
+            self.commit()
+        return ai_message_obj

-        new_ai_response_obj = self.chat("", show_thoughts, **kwargs)
-        return new_ai_response_obj

     def delete_branch(self, message_id: str):
         if not self._is_db_backed:
             raise NotImplementedError("Branch deletion is only supported for database-backed discussions.")
-
-        if message_id not in self.message_index:
+        if message_id not in self._message_index:
             raise ValueError("Message not found.")
-
-        msg_to_delete = self.session.query(self.db_manager.MessageModel).filter_by(id=message_id).first()
+        msg_to_delete = self._session.query(self.db_manager.MessageModel).filter_by(id=message_id).first()
         if msg_to_delete:
-            parent_id = msg_to_delete.parent_id
-            self.session.delete(msg_to_delete)
-            self.active_branch_id = parent_id
+            self.active_branch_id = msg_to_delete.parent_id
+            self._session.delete(msg_to_delete)
             self.commit()
-            self._load_from_db(self.id)

     def switch_to_branch(self, message_id: str):
-        if message_id not in self.message_index:
+        if message_id not in self._message_index:
             raise ValueError(f"Message ID '{message_id}' not found in the current discussion.")
         self.active_branch_id = message_id
-        if self._is_db_backed:
-            self.db_discussion.active_branch_id = message_id
-            if self.autosave: self.commit()
+        self.touch()

     def format_discussion(self, max_allowed_tokens: int, branch_tip_id: Optional[str] = None) -> str:
         return self.export("lollms_text", branch_tip_id, max_allowed_tokens)

     def _get_full_system_prompt(self) -> Optional[str]:
-        full_sys_prompt_parts = []
+        parts = []
         if self.scratchpad:
-            full_sys_prompt_parts.append("--- KNOWLEDGE SCRATCHPAD ---")
-            full_sys_prompt_parts.append(self.scratchpad.strip())
-            full_sys_prompt_parts.append("--- END SCRATCHPAD ---")
-
+            parts.extend(["--- KNOWLEDGE SCRATCHPAD ---", self.scratchpad.strip(), "--- END SCRATCHPAD ---"])
         if self.system_prompt and self.system_prompt.strip():
-            full_sys_prompt_parts.append(self.system_prompt.strip())
-
-        return "\n\n".join(full_sys_prompt_parts) if full_sys_prompt_parts else None
+            parts.append(self.system_prompt.strip())
+        return "\n\n".join(parts) if parts else None

     def export(self, format_type: str, branch_tip_id: Optional[str] = None, max_allowed_tokens: Optional[int] = None) -> Union[List[Dict], str]:
-        if branch_tip_id is None: branch_tip_id = self.active_branch_id
+        branch_tip_id = branch_tip_id or self.active_branch_id
         if not branch_tip_id and format_type in ["lollms_text", "openai_chat", "ollama_chat"]:
             return "" if format_type == "lollms_text" else []
+        branch, full_system_prompt, participants = self.get_branch(branch_tip_id), self._get_full_system_prompt(), self.participants or {}

-        branch = self.get_branch(branch_tip_id)
-        full_system_prompt = self._get_full_system_prompt()
-
-        participants = self.participants or {}
+        def get_full_content(msg: LollmsMessage) -> str:
+            content_to_use = msg.content
+            if self.include_thoughts_in_context and msg.sender_type == 'assistant' and msg.raw_content:
+                if self.thought_placeholder:
+                    content_to_use = re.sub(r"<think>.*?</think>", f"<think>{self.thought_placeholder}</think>", msg.raw_content, flags=re.DOTALL)
+                else:
+                    content_to_use = msg.raw_content
+
+            parts = [f"--- Internal Scratchpad ---\n{msg.scratchpad.strip()}\n---"] if msg.scratchpad and msg.scratchpad.strip() else []
+            parts.append(content_to_use.strip())
+            return "\n".join(parts)

         if format_type == "lollms_text":
-            prompt_parts = []
-            current_tokens = 0
-
+            prompt_parts, current_tokens = [], 0
             if full_system_prompt:
                 sys_msg_text = f"!@>system:\n{full_system_prompt}\n"
                 sys_tokens = self.lollmsClient.count_tokens(sys_msg_text)
                 if max_allowed_tokens is None or sys_tokens <= max_allowed_tokens:
                     prompt_parts.append(sys_msg_text)
                     current_tokens += sys_tokens
-
             for msg in reversed(branch):
-                sender_str = msg['sender'].replace(':', '').replace('!@>', '')
-                content = msg['content'].strip()
-                if msg.get('images'): content += f"\n({len(msg['images'])} image(s) attached)"
+                sender_str = msg.sender.replace(':', '').replace('!@>', '')
+                content = get_full_content(msg)
+                if msg.images:
+                    content += f"\n({len(msg.images)} image(s) attached)"
                 msg_text = f"!@>{sender_str}:\n{content}\n"
                 msg_tokens = self.lollmsClient.count_tokens(msg_text)
-
-                if max_allowed_tokens is not None and current_tokens + msg_tokens > max_allowed_tokens: break
+                if max_allowed_tokens is not None and current_tokens + msg_tokens > max_allowed_tokens:
+                    break
                 prompt_parts.insert(1 if full_system_prompt else 0, msg_text)
                 current_tokens += msg_tokens
             return "".join(prompt_parts).strip()
-
+
         messages = []
         if full_system_prompt:
             messages.append({"role": "system", "content": full_system_prompt})
-
         for msg in branch:
-            role = participants.get(msg['sender'], "user")
-            content = msg.get('content', '').strip()
-            images = msg.get('images', [])
-
+            role, content, images = participants.get(msg.sender, "user"), get_full_content(msg), msg.images or []
            if format_type == "openai_chat":
                 if images:
                     content_parts = [{"type": "text", "text": content}] if content else []
                     for img in images:
-                        image_url = img['data'] if img['type'] == 'url' else f"data:image/jpeg;base64,{img['data']}"
-                        content_parts.append({"type": "image_url", "image_url": {"url": image_url, "detail": "auto"}})
+                        content_parts.append({"type": "image_url", "image_url": {"url": img['data'] if img['type'] == 'url' else f"data:image/jpeg;base64,{img['data']}", "detail": "auto"}})
                     messages.append({"role": role, "content": content_parts})
                 else:
                     messages.append({"role": role, "content": content})
             elif format_type == "ollama_chat":
                 message_dict = {"role": role, "content": content}
-                base64_images = [img['data'] for img in images or [] if img['type'] == 'base64']
+                base64_images = [img['data'] for img in images if img['type'] == 'base64']
                 if base64_images:
                     message_dict["images"] = base64_images
                 messages.append(message_dict)
             else:
                 raise ValueError(f"Unsupported export format_type: {format_type}")
-
         return messages

     def summarize_and_prune(self, max_tokens: int, preserve_last_n: int = 4):
         branch_tip_id = self.active_branch_id
-        if not branch_tip_id: return
-
-        current_prompt_text = self.format_discussion(999999, branch_tip_id)
-        current_tokens = self.lollmsClient.count_tokens(current_prompt_text)
-        if current_tokens <= max_tokens: return
-
+        if not branch_tip_id:
+            return
+        current_tokens = self.lollmsClient.count_tokens(self.format_discussion(999999, branch_tip_id))
+        if current_tokens <= max_tokens:
+            return
         branch = self.get_branch(branch_tip_id)
-        if len(branch) <= preserve_last_n: return
-
+        if len(branch) <= preserve_last_n:
+            return
         messages_to_prune = branch[:-preserve_last_n]
-        text_to_summarize = "\n\n".join([f"{m['sender']}: {m['content']}" for m in messages_to_prune])
-
+        text_to_summarize = "\n\n".join([f"{m.sender}: {m.content}" for m in messages_to_prune])
         summary_prompt = f"Concisely summarize this conversation excerpt:\n---\n{text_to_summarize}\n---\nSUMMARY:"
         try:
             summary = self.lollmsClient.generate_text(summary_prompt, n_predict=300, temperature=0.1)
         except Exception as e:
             print(f"\n[WARNING] Pruning failed, couldn't generate summary: {e}")
             return
-
-        new_scratchpad_content = f"{self.scratchpad}\n\n--- Summary of earlier conversation ---\n{summary.strip()}"
-        self.scratchpad = new_scratchpad_content.strip()
-
-        pruned_ids = {msg['id'] for msg in messages_to_prune}
-        self.messages = [m for m in self.messages if m['id'] not in pruned_ids]
-        self._messages_to_delete.extend(list(pruned_ids))
-        self._rebuild_in_memory_indexes()
-
+        self.scratchpad = f"{self.scratchpad}\n\n--- Summary of earlier conversation ---\n{summary.strip()}".strip()
+        pruned_ids = {msg.id for msg in messages_to_prune}
+        if self._is_db_backed:
+            self._messages_to_delete_from_db.update(pruned_ids)
+            self._db_discussion.messages = [m for m in self._db_discussion.messages if m.id not in pruned_ids]
+        else:
+            self._db_discussion.messages = [m for m in self._db_discussion.messages if m.id not in pruned_ids]
+        self._rebuild_message_index()
+        self.touch()
         print(f"\n[INFO] Discussion auto-pruned. {len(messages_to_prune)} messages summarized.")

     def to_dict(self):
-        messages_copy = [msg.copy() for msg in self.messages]
-        for msg in messages_copy:
-            if 'created_at' in msg and isinstance(msg['created_at'], datetime):
-                msg['created_at'] = msg['created_at'].isoformat()
-            if 'message_metadata' in msg:
-                msg['metadata'] = msg.pop('message_metadata')
-
         return {
-            "id": self.id, "system_prompt": self.system_prompt,
-            "participants": self.participants, "active_branch_id": self.active_branch_id,
-            "metadata": self.metadata, "scratchpad": self.scratchpad,
-            "messages": messages_copy,
+            "id": self.id, "system_prompt": self.system_prompt, "participants": self.participants,
+            "active_branch_id": self.active_branch_id, "metadata": self.metadata, "scratchpad": self.scratchpad,
+            "messages": [{ 'id': m.id, 'parent_id': m.parent_id, 'discussion_id': m.discussion_id, 'sender': m.sender,
+                           'sender_type': m.sender_type, 'content': m.content, 'scratchpad': m.scratchpad, 'images': m.images,
+                           'created_at': m.created_at.isoformat(), 'metadata': m.metadata } for m in self.messages],
             "created_at": self.created_at.isoformat() if self.created_at else None,
             "updated_at": self.updated_at.isoformat() if self.updated_at else None
         }

     def load_from_dict(self, data: Dict):
-        self._reset_in_memory_state()
-        self.id = data.get("id", str(uuid.uuid4()))
-        self.system_prompt = data.get("system_prompt")
-        self.participants = data.get("participants", {})
-        self.active_branch_id = data.get("active_branch_id")
-        self.metadata = data.get("metadata", {})
+        self._create_in_memory_proxy(id=data.get("id"))
+        self.system_prompt, self.participants = data.get("system_prompt"), data.get("participants", {})
+        self.active_branch_id, self.metadata = data.get("active_branch_id"), data.get("metadata", {})
         self.scratchpad = data.get("scratchpad", "")
-
-        loaded_messages = data.get("messages", [])
-        for msg in loaded_messages:
-            if 'created_at' in msg and isinstance(msg['created_at'], str):
+        for msg_data in data.get("messages", []):
+            if 'created_at' in msg_data and isinstance(msg_data['created_at'], str):
                 try:
-                    msg['created_at'] = datetime.fromisoformat(msg['created_at'])
+                    msg_data['created_at'] = datetime.fromisoformat(msg_data['created_at'])
                 except ValueError:
-                    msg['created_at'] = datetime.utcnow()
-        self.messages = loaded_messages
-
+                    msg_data['created_at'] = datetime.utcnow()
+            self.add_message(**msg_data)
         self.created_at = datetime.fromisoformat(data['created_at']) if data.get('created_at') else datetime.utcnow()
         self.updated_at = datetime.fromisoformat(data['updated_at']) if data.get('updated_at') else self.created_at
-        self._rebuild_in_memory_indexes()
-
-    def _rebuild_in_memory_indexes(self):
-        self.message_index = {msg['id']: msg for msg in self.messages}

     @staticmethod
-    def migrate(lollms_client: 'LollmsClient', db_manager: DatabaseManager, folder_path: Union[str, Path]):
+    def migrate(lollms_client: 'LollmsClient', db_manager: LollmsDataManager, folder_path: Union[str, Path]):
         folder = Path(folder_path)
         if not folder.is_dir():
             print(f"Error: Path '{folder}' is not a valid directory.")
             return
-
         print(f"\n--- Starting Migration from '{folder}' ---")
-        discussion_files = list(folder.glob("*.json")) + list(folder.glob("*.yaml"))
-        session = db_manager.get_session()
-        for i, file_path in enumerate(discussion_files):
-            print(f"Migrating file {i+1}/{len(discussion_files)}: {file_path.name} ... ", end="")
-            try:
-                in_memory_discussion = LollmsDiscussion.create_new(lollms_client=lollms_client)
-                if file_path.suffix.lower() == ".json":
-                    with open(file_path, 'r', encoding='utf-8') as f: data = json.load(f)
-                else:
-                    with open(file_path, 'r', encoding='utf-8') as f: data = yaml.safe_load(f)
-
-                in_memory_discussion.load_from_dict(data)
-                discussion_id = in_memory_discussion.id
-
-                existing = session.query(db_manager.DiscussionModel).filter_by(id=discussion_id).first()
-                if existing:
-                    print("SKIPPED (already exists)")
+        files = list(folder.glob("*.json")) + list(folder.glob("*.yaml"))
+        with db_manager.get_session() as session:
+            valid_disc_keys = {c.name for c in db_manager.DiscussionModel.__table__.columns}
+            valid_msg_keys = {c.name for c in db_manager.MessageModel.__table__.columns}
+            for i, file_path in enumerate(files):
+                print(f"Migrating file {i+1}/{len(files)}: {file_path.name} ... ", end="")
+                try:
+                    data = yaml.safe_load(file_path.read_text(encoding='utf-8'))
+                    discussion_id = data.get("id", str(uuid.uuid4()))
+                    if session.query(db_manager.DiscussionModel).filter_by(id=discussion_id).first():
+                        print("SKIPPED (already exists)")
+                        continue
+                    discussion_data = data.copy()
+                    if 'metadata' in discussion_data:
+                        discussion_data['discussion_metadata'] = discussion_data.pop('metadata')
+                    for key in ['created_at', 'updated_at']:
+                        if key in discussion_data and isinstance(discussion_data[key], str):
+                            discussion_data[key] = datetime.fromisoformat(discussion_data[key])
+                    db_discussion = db_manager.DiscussionModel(**{k: v for k, v in discussion_data.items() if k in valid_disc_keys})
+                    session.add(db_discussion)
+                    for msg_data in data.get("messages", []):
+                        msg_data['discussion_id'] = db_discussion.id
+                        if 'metadata' in msg_data:
+                            msg_data['message_metadata'] = msg_data.pop('metadata')
+                        if 'created_at' in msg_data and isinstance(msg_data['created_at'], str):
+                            msg_data['created_at'] = datetime.fromisoformat(msg_data['created_at'])
+                        msg_orm = db_manager.MessageModel(**{k: v for k, v in msg_data.items() if k in valid_msg_keys})
+                        session.add(msg_orm)
+                    session.flush()
+                    print("OK")
+                except Exception as e:
+                    print(f"FAILED. Error: {e}")
+                    session.rollback()
                     continue
-
-                valid_disc_keys = {c.name for c in db_manager.DiscussionModel.__table__.columns}
-                valid_msg_keys = {c.name for c in db_manager.MessageModel.__table__.columns}
-
-                discussion_data = {
-                    'id': in_memory_discussion.id,
-                    'system_prompt': in_memory_discussion.system_prompt,
-                    'participants': in_memory_discussion.participants,
-                    'active_branch_id': in_memory_discussion.active_branch_id,
-                    'discussion_metadata': in_memory_discussion.metadata,
-                    'created_at': in_memory_discussion.created_at,
-                    'updated_at': in_memory_discussion.updated_at
-                }
-                project_name = in_memory_discussion.metadata.get('project_name', file_path.stem)
-                if 'project_name' in valid_disc_keys:
-                    discussion_data['project_name'] = project_name
-
-                db_discussion = db_manager.DiscussionModel(**discussion_data)
-                session.add(db_discussion)
-
-                for msg_data in in_memory_discussion.messages:
-                    msg_data['discussion_id'] = db_discussion.id
-                    if 'metadata' in msg_data:
-                        msg_data['message_metadata'] = msg_data.pop('metadata')
-                    filtered_msg_data = {k: v for k, v in msg_data.items() if k in valid_msg_keys}
-                    msg_orm = db_manager.MessageModel(**filtered_msg_data)
-                    session.add(msg_orm)
-
-                print("OK")
-            except Exception as e:
-                print(f"FAILED. Error: {e}")
-                session.rollback()
-        session.commit()
-        session.close()
+            session.commit()
+        print("--- Migration Finished ---")