lollms-client 0.21.0__py3-none-any.whl → 0.22.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of lollms-client might be problematic. Click here for more details.

@@ -4,15 +4,18 @@ import base64
4
4
  import os
5
5
  import uuid
6
6
  import shutil
7
+ import re
7
8
  from collections import defaultdict
8
9
  from datetime import datetime
9
10
  from typing import List, Dict, Optional, Union, Any, Type, Callable
10
11
  from pathlib import Path
12
+ from types import SimpleNamespace
11
13
 
12
14
  from sqlalchemy import (create_engine, Column, String, Text, Integer, DateTime,
13
- ForeignKey, JSON, Boolean, LargeBinary, Index)
14
- from sqlalchemy.orm import sessionmaker, relationship, Session, declarative_base
15
+ ForeignKey, JSON, Boolean, LargeBinary, Index, Float)
16
+ from sqlalchemy.orm import sessionmaker, relationship, Session, declarative_base, declared_attr
15
17
  from sqlalchemy.types import TypeDecorator
18
+ from sqlalchemy.orm.exc import NoResultFound
16
19
 
17
20
  try:
18
21
  from cryptography.fernet import Fernet, InvalidToken
@@ -23,17 +26,21 @@ try:
23
26
  except ImportError:
24
27
  ENCRYPTION_AVAILABLE = False
25
28
 
29
+ # Type hint placeholders for classes defined externally
26
30
  if False:
27
31
  from lollms_client import LollmsClient
28
32
  from lollms_client.lollms_types import MSG_TYPE
33
+ from lollms_personality import LollmsPersonality
29
34
 
30
35
  class EncryptedString(TypeDecorator):
36
+ """A SQLAlchemy TypeDecorator for field-level database encryption."""
31
37
  impl = LargeBinary
32
38
  cache_ok = True
33
39
 
34
40
  def __init__(self, key: str, *args, **kwargs):
35
41
  super().__init__(*args, **kwargs)
36
- if not ENCRYPTION_AVAILABLE: raise ImportError("'cryptography' is required for DB encryption.")
42
+ if not ENCRYPTION_AVAILABLE:
43
+ raise ImportError("'cryptography' is required for DB encryption.")
37
44
  self.salt = b'lollms-fixed-salt-for-db-encryption'
38
45
  kdf = PBKDF2HMAC(
39
46
  algorithm=hashes.SHA256(), length=32, salt=self.salt,
@@ -43,21 +50,24 @@ class EncryptedString(TypeDecorator):
43
50
  self.fernet = Fernet(derived_key)
44
51
 
45
52
  def process_bind_param(self, value: Optional[str], dialect) -> Optional[bytes]:
46
- if value is None: return None
53
+ if value is None:
54
+ return None
47
55
  return self.fernet.encrypt(value.encode('utf-8'))
48
56
 
49
57
  def process_result_value(self, value: Optional[bytes], dialect) -> Optional[str]:
50
- if value is None: return None
58
+ if value is None:
59
+ return None
51
60
  try:
52
61
  return self.fernet.decrypt(value).decode('utf-8')
53
62
  except InvalidToken:
54
63
  return "<DECRYPTION_FAILED: Invalid Key or Corrupt Data>"
55
64
 
56
65
  def create_dynamic_models(discussion_mixin: Optional[Type] = None, message_mixin: Optional[Type] = None, encryption_key: Optional[str] = None):
66
+ """Factory to dynamically create SQLAlchemy ORM models with custom mixins."""
57
67
  Base = declarative_base()
58
68
  EncryptedText = EncryptedString(encryption_key) if encryption_key else Text
59
69
 
60
- class DiscussionBase(Base):
70
+ class DiscussionBase:
61
71
  __abstract__ = True
62
72
  id = Column(String, primary_key=True, default=lambda: str(uuid.uuid4()))
63
73
  system_prompt = Column(EncryptedText, nullable=True)
@@ -66,49 +76,50 @@ def create_dynamic_models(discussion_mixin: Optional[Type] = None, message_mixin
66
76
  discussion_metadata = Column(JSON, nullable=True, default=dict)
67
77
  created_at = Column(DateTime, default=datetime.utcnow)
68
78
  updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)
79
+
80
+ @declared_attr
81
+ def messages(cls):
82
+ return relationship("Message", back_populates="discussion", cascade="all, delete-orphan", lazy="joined")
69
83
 
70
- class MessageBase(Base):
84
+ class MessageBase:
71
85
  __abstract__ = True
72
86
  id = Column(String, primary_key=True, default=lambda: str(uuid.uuid4()))
73
- discussion_id = Column(String, ForeignKey('discussions.id'), nullable=False)
74
- parent_id = Column(String, ForeignKey('messages.id'), nullable=True)
87
+ discussion_id = Column(String, ForeignKey('discussions.id'), nullable=False, index=True)
88
+ parent_id = Column(String, ForeignKey('messages.id'), nullable=True, index=True)
75
89
  sender = Column(String, nullable=False)
76
90
  sender_type = Column(String, nullable=False)
91
+
92
+ raw_content = Column(EncryptedText, nullable=True)
93
+ thoughts = Column(EncryptedText, nullable=True)
77
94
  content = Column(EncryptedText, nullable=False)
95
+ scratchpad = Column(EncryptedText, nullable=True)
96
+
97
+ tokens = Column(Integer, nullable=True)
98
+ binding_name = Column(String, nullable=True)
99
+ model_name = Column(String, nullable=True)
100
+ generation_speed = Column(Float, nullable=True)
101
+
78
102
  message_metadata = Column(JSON, nullable=True, default=dict)
79
103
  images = Column(JSON, nullable=True, default=list)
80
104
  created_at = Column(DateTime, default=datetime.utcnow)
81
-
82
- discussion_attrs = {'__tablename__': 'discussions'}
83
- if hasattr(discussion_mixin, '__table_args__'):
84
- discussion_attrs['__table_args__'] = discussion_mixin.__table_args__
85
- if discussion_mixin:
86
- for attr, col in discussion_mixin.__dict__.items():
87
- if isinstance(col, Column):
88
- discussion_attrs[attr] = col
89
-
90
- message_attrs = {'__tablename__': 'messages'}
91
- if hasattr(message_mixin, '__table_args__'):
92
- message_attrs['__table_args__'] = message_mixin.__table_args__
93
- if message_mixin:
94
- for attr, col in message_mixin.__dict__.items():
95
- if isinstance(col, Column):
96
- message_attrs[attr] = col
97
-
98
- discussion_bases = (discussion_mixin, DiscussionBase) if discussion_mixin else (DiscussionBase,)
99
- DynamicDiscussion = type('Discussion', discussion_bases, discussion_attrs)
105
+
106
+ @declared_attr
107
+ def discussion(cls):
108
+ return relationship("Discussion", back_populates="messages")
109
+
110
+ discussion_bases = (discussion_mixin, DiscussionBase, Base) if discussion_mixin else (DiscussionBase, Base)
111
+ DynamicDiscussion = type('Discussion', discussion_bases, {'__tablename__': 'discussions'})
100
112
 
101
- message_bases = (message_mixin, MessageBase) if message_mixin else (MessageBase,)
102
- DynamicMessage = type('Message', message_bases, message_attrs)
113
+ message_bases = (message_mixin, MessageBase, Base) if message_mixin else (MessageBase, Base)
114
+ DynamicMessage = type('Message', message_bases, {'__tablename__': 'messages'})
103
115
 
104
- DynamicDiscussion.messages = relationship(DynamicMessage, back_populates="discussion", cascade="all, delete-orphan", lazy="joined")
105
- DynamicMessage.discussion = relationship(DynamicDiscussion, back_populates="messages")
106
-
107
116
  return Base, DynamicDiscussion, DynamicMessage
108
117
 
109
- class DatabaseManager:
118
+ class LollmsDataManager:
119
+ """Manages database connection, session, and table creation."""
110
120
  def __init__(self, db_path: str, discussion_mixin: Optional[Type] = None, message_mixin: Optional[Type] = None, encryption_key: Optional[str] = None):
111
- if not db_path: raise ValueError("Database path cannot be empty.")
121
+ if not db_path:
122
+ raise ValueError("Database path cannot be empty.")
112
123
  self.Base, self.DiscussionModel, self.MessageModel = create_dynamic_models(
113
124
  discussion_mixin, message_mixin, encryption_key
114
125
  )
@@ -121,513 +132,533 @@ class DatabaseManager:
121
132
 
122
133
  def get_session(self) -> Session:
123
134
  return self.SessionLocal()
124
-
135
+
125
136
  def list_discussions(self) -> List[Dict]:
126
- session = self.get_session()
127
- discussions = session.query(self.DiscussionModel).all()
128
- session.close()
129
- discussion_list = []
130
- for disc in discussions:
131
- disc_dict = {c.name: getattr(disc, c.name) for c in disc.__table__.columns}
132
- discussion_list.append(disc_dict)
133
- return discussion_list
137
+ with self.get_session() as session:
138
+ discussions = session.query(self.DiscussionModel).all()
139
+ return [{c.name: getattr(disc, c.name) for c in disc.__table__.columns} for disc in discussions]
134
140
 
135
141
  def get_discussion(self, lollms_client: 'LollmsClient', discussion_id: str, **kwargs) -> Optional['LollmsDiscussion']:
136
- session = self.get_session()
137
- db_disc = session.query(self.DiscussionModel).filter_by(id=discussion_id).first()
138
- session.close()
139
- if db_disc:
140
- return LollmsDiscussion(lollmsClient=lollms_client, discussion_id=discussion_id, db_manager=self, **kwargs)
141
- return None
142
+ with self.get_session() as session:
143
+ try:
144
+ db_disc = session.query(self.DiscussionModel).filter_by(id=discussion_id).one()
145
+ session.expunge(db_disc)
146
+ return LollmsDiscussion(lollmsClient=lollms_client, db_manager=self, db_discussion_obj=db_disc, **kwargs)
147
+ except NoResultFound:
148
+ return None
142
149
 
143
150
  def search_discussions(self, **criteria) -> List[Dict]:
144
- session = self.get_session()
145
- query = session.query(self.DiscussionModel)
146
- for key, value in criteria.items():
147
- query = query.filter(getattr(self.DiscussionModel, key).ilike(f"%{value}%"))
148
- discussions = query.all()
149
- session.close()
150
- discussion_list = []
151
- for disc in discussions:
152
- disc_dict = {c.name: getattr(disc, c.name) for c in disc.__table__.columns}
153
- discussion_list.append(disc_dict)
154
- return discussion_list
151
+ with self.get_session() as session:
152
+ query = session.query(self.DiscussionModel)
153
+ for key, value in criteria.items():
154
+ if hasattr(self.DiscussionModel, key):
155
+ query = query.filter(getattr(self.DiscussionModel, key).ilike(f"%{value}%"))
156
+ discussions = query.all()
157
+ return [{c.name: getattr(disc, c.name) for c in disc.__table__.columns} for disc in discussions]
155
158
 
156
159
  def delete_discussion(self, discussion_id: str):
157
- session = self.get_session()
158
- db_disc = session.query(self.DiscussionModel).filter_by(id=discussion_id).first()
159
- if db_disc:
160
- session.delete(db_disc)
161
- session.commit()
162
- session.close()
160
+ with self.get_session() as session:
161
+ db_disc = session.query(self.DiscussionModel).filter_by(id=discussion_id).first()
162
+ if db_disc:
163
+ session.delete(db_disc)
164
+ session.commit()
165
+
166
+ class LollmsMessage:
167
+ """A wrapper for a message ORM object, providing direct attribute access."""
168
+ def __init__(self, discussion: 'LollmsDiscussion', db_message: Any):
169
+ object.__setattr__(self, '_discussion', discussion)
170
+ object.__setattr__(self, '_db_message', db_message)
171
+
172
+ def __getattr__(self, name: str) -> Any:
173
+ if name == 'metadata':
174
+ return getattr(self._db_message, 'message_metadata', None)
175
+ return getattr(self._db_message, name)
176
+
177
+ def __setattr__(self, name: str, value: Any):
178
+ if name == 'metadata':
179
+ setattr(self._db_message, 'message_metadata', value)
180
+ else:
181
+ setattr(self._db_message, name, value)
182
+ self._discussion.touch()
183
+
184
+ def __repr__(self) -> str:
185
+ return f"<LollmsMessage id={self.id} sender='{self.sender}'>"
163
186
 
164
187
  class LollmsDiscussion:
165
- def __init__(self, lollmsClient: 'LollmsClient', discussion_id: Optional[str] = None, db_manager: Optional[DatabaseManager] = None, autosave: bool = False, max_context_size: Optional[int] = None):
166
- self.lollmsClient = lollmsClient
167
- self.db_manager = db_manager
168
- self.autosave = autosave
169
- self.max_context_size = max_context_size
170
- self._is_db_backed = db_manager is not None
188
+ """Represents and manages a single discussion, acting as a high-level interface."""
189
+ def __init__(self, lollmsClient: 'LollmsClient', db_manager: Optional[LollmsDataManager] = None,
190
+ discussion_id: Optional[str] = None, db_discussion_obj: Optional[Any] = None,
191
+ autosave: bool = False, max_context_size: Optional[int] = None):
171
192
 
172
- self.session = None
173
- self.db_discussion = None
174
- self._messages_to_delete = []
175
-
176
- self._reset_in_memory_state()
193
+ object.__setattr__(self, 'lollmsClient', lollmsClient)
194
+ object.__setattr__(self, 'db_manager', db_manager)
195
+ object.__setattr__(self, 'autosave', autosave)
196
+ object.__setattr__(self, 'max_context_size', max_context_size)
197
+ object.__setattr__(self, 'scratchpad', "")
198
+ object.__setattr__(self, 'show_thoughts', False)
199
+ object.__setattr__(self, 'include_thoughts_in_context', False)
200
+ object.__setattr__(self, 'thought_placeholder', "<thought process hidden>")
201
+
202
+ object.__setattr__(self, '_session', None)
203
+ object.__setattr__(self, '_db_discussion', None)
204
+ object.__setattr__(self, '_message_index', None)
205
+ object.__setattr__(self, '_messages_to_delete_from_db', set())
206
+ object.__setattr__(self, '_is_db_backed', db_manager is not None)
177
207
 
178
208
  if self._is_db_backed:
179
- if not discussion_id: raise ValueError("A discussion_id is required for database-backed discussions.")
180
- self.session = db_manager.get_session()
181
- self._load_from_db(discussion_id)
182
- else:
183
- self.id = discussion_id or str(uuid.uuid4())
184
- self.created_at = datetime.utcnow()
185
- self.updated_at = self.created_at
186
-
187
- def _reset_in_memory_state(self):
188
- self.id: str = ""
189
- self.system_prompt: Optional[str] = None
190
- self.participants: Dict[str, str] = {}
191
- self.active_branch_id: Optional[str] = None
192
- self.metadata: Dict[str, Any] = {}
193
- self.scratchpad: str = ""
194
- self.messages: List[Dict] = []
195
- self.message_index: Dict[str, Dict] = {}
196
- self.created_at: Optional[datetime] = None
197
- self.updated_at: Optional[datetime] = None
198
-
199
- def _load_from_db(self, discussion_id: str):
200
- self.db_discussion = self.session.query(self.db_manager.DiscussionModel).filter(self.db_manager.DiscussionModel.id == discussion_id).one()
201
-
202
- self.id = self.db_discussion.id
203
- self.system_prompt = self.db_discussion.system_prompt
204
- self.participants = self.db_discussion.participants or {}
205
- self.active_branch_id = self.db_discussion.active_branch_id
206
- self.metadata = self.db_discussion.discussion_metadata or {}
207
-
208
- self.messages = []
209
- self.message_index = {}
210
- for msg in self.db_discussion.messages:
211
- msg_dict = {c.name: getattr(msg, c.name) for c in msg.__table__.columns}
212
- if 'message_metadata' in msg_dict:
213
- msg_dict['metadata'] = msg_dict.pop('message_metadata')
214
- self.messages.append(msg_dict)
215
- self.message_index[msg.id] = msg_dict
209
+ if not db_discussion_obj and not discussion_id:
210
+ raise ValueError("Either discussion_id or db_discussion_obj must be provided for DB-backed discussions.")
216
211
 
217
- def commit(self):
218
- if not self._is_db_backed or not self.session: return
219
-
220
- if self.db_discussion:
221
- self.db_discussion.system_prompt = self.system_prompt
222
- self.db_discussion.participants = self.participants
223
- self.db_discussion.active_branch_id = self.active_branch_id
224
- self.db_discussion.discussion_metadata = self.metadata
225
- self.db_discussion.updated_at = datetime.utcnow()
226
-
227
- for msg_id in self._messages_to_delete:
228
- msg_to_del = self.session.query(self.db_manager.MessageModel).filter_by(id=msg_id).first()
229
- if msg_to_del: self.session.delete(msg_to_del)
230
- self._messages_to_delete.clear()
231
-
232
- for msg_data in self.messages:
233
- msg_id = msg_data['id']
234
- msg_orm = self.session.query(self.db_manager.MessageModel).filter_by(id=msg_id).first()
235
-
236
- if 'metadata' in msg_data:
237
- msg_data['message_metadata'] = msg_data.pop('metadata',None)
238
-
239
- if not msg_orm:
240
- msg_data_copy = msg_data.copy()
241
- valid_keys = {c.name for c in self.db_manager.MessageModel.__table__.columns}
242
- filtered_msg_data = {k: v for k, v in msg_data_copy.items() if k in valid_keys}
243
- msg_orm = self.db_manager.MessageModel(**filtered_msg_data)
244
- self.session.add(msg_orm)
212
+ self._session = db_manager.get_session()
213
+ if db_discussion_obj:
214
+ self._db_discussion = self._session.merge(db_discussion_obj)
245
215
  else:
246
- for key, value in msg_data.items():
247
- if hasattr(msg_orm, key):
248
- setattr(msg_orm, key, value)
249
-
250
- self.session.commit()
251
-
252
- def touch(self):
253
- self.updated_at = datetime.utcnow()
254
- if self._is_db_backed and self.autosave:
255
- self.commit()
216
+ try:
217
+ self._db_discussion = self._session.query(db_manager.DiscussionModel).filter_by(id=discussion_id).one()
218
+ except NoResultFound:
219
+ self._session.close()
220
+ raise ValueError(f"No discussion found with ID: {discussion_id}")
221
+ else:
222
+ self._create_in_memory_proxy(id=discussion_id)
223
+ self._rebuild_message_index()
224
+
225
+ @property
226
+ def remaining_tokens(self) -> Optional[int]:
227
+ """Calculates the remaining tokens available in the context window."""
228
+ binding = self.lollmsClient.binding
229
+ if not binding or not hasattr(binding, 'ctx_size') or not binding.ctx_size:
230
+ return None
231
+ max_ctx = binding.ctx_size
232
+ current_prompt = self.format_discussion(max_ctx)
233
+ current_tokens = self.lollmsClient.count_tokens(current_prompt)
234
+ return max_ctx - current_tokens
256
235
 
257
236
  @classmethod
258
- def create_new(cls, lollms_client: 'LollmsClient', db_manager: Optional[DatabaseManager] = None, **kwargs) -> 'LollmsDiscussion':
237
+ def create_new(cls, lollms_client: 'LollmsClient', db_manager: Optional[LollmsDataManager] = None, **kwargs) -> 'LollmsDiscussion':
259
238
  init_args = {
260
239
  'autosave': kwargs.pop('autosave', False),
261
240
  'max_context_size': kwargs.pop('max_context_size', None)
262
241
  }
263
-
264
242
  if db_manager:
265
- session = db_manager.get_session()
266
- valid_keys = db_manager.DiscussionModel.__table__.columns.keys()
267
- db_creation_args = {k: v for k, v in kwargs.items() if k in valid_keys}
268
- db_discussion = db_manager.DiscussionModel(**db_creation_args)
269
- session.add(db_discussion)
270
- session.commit()
271
- return cls(lollmsClient=lollms_client, discussion_id=db_discussion.id, db_manager=db_manager, **init_args)
243
+ with db_manager.get_session() as session:
244
+ valid_keys = db_manager.DiscussionModel.__table__.columns.keys()
245
+ db_creation_args = {k: v for k, v in kwargs.items() if k in valid_keys}
246
+ db_discussion_orm = db_manager.DiscussionModel(**db_creation_args)
247
+ session.add(db_discussion_orm)
248
+ session.commit()
249
+ session.expunge(db_discussion_orm)
250
+ return cls(lollmsClient=lollms_client, db_manager=db_manager, db_discussion_obj=db_discussion_orm, **init_args)
272
251
  else:
273
- discussion_id = kwargs.get('discussion_id')
274
- return cls(lollmsClient=lollms_client, discussion_id=discussion_id, **init_args)
252
+ return cls(lollmsClient=lollms_client, discussion_id=kwargs.get('id'), **init_args)
253
+
254
+ def __getattr__(self, name: str) -> Any:
255
+ if name == 'metadata':
256
+ return getattr(self._db_discussion, 'discussion_metadata', None)
257
+ if name == 'messages':
258
+ return [LollmsMessage(self, msg) for msg in self._db_discussion.messages]
259
+ return getattr(self._db_discussion, name)
260
+
261
+ def __setattr__(self, name: str, value: Any):
262
+ internal_attrs = [
263
+ 'lollmsClient','db_manager','autosave','max_context_size','scratchpad',
264
+ 'show_thoughts', 'include_thoughts_in_context', 'thought_placeholder',
265
+ '_session','_db_discussion','_message_index','_messages_to_delete_from_db', '_is_db_backed'
266
+ ]
267
+ if name in internal_attrs:
268
+ object.__setattr__(self, name, value)
269
+ else:
270
+ if name == 'metadata':
271
+ setattr(self._db_discussion, 'discussion_metadata', value)
272
+ else:
273
+ setattr(self._db_discussion, name, value)
274
+ self.touch()
275
+
276
+ def _create_in_memory_proxy(self, id: Optional[str] = None):
277
+ proxy = SimpleNamespace()
278
+ proxy.id, proxy.system_prompt, proxy.participants = id or str(uuid.uuid4()), None, {}
279
+ proxy.active_branch_id, proxy.discussion_metadata = None, {}
280
+ proxy.created_at, proxy.updated_at = datetime.utcnow(), datetime.utcnow()
281
+ proxy.messages = []
282
+ object.__setattr__(self, '_db_discussion', proxy)
283
+
284
+ def _rebuild_message_index(self):
285
+ if self._is_db_backed and self._session.is_active and self._db_discussion in self._session:
286
+ self._session.refresh(self._db_discussion, ['messages'])
287
+ self._message_index = {msg.id: msg for msg in self._db_discussion.messages}
275
288
 
276
- def set_system_prompt(self, prompt: str):
277
- self.system_prompt = prompt
278
- self.touch()
289
+ def touch(self):
290
+ setattr(self._db_discussion, 'updated_at', datetime.utcnow())
291
+ if self._is_db_backed and self.autosave:
292
+ self.commit()
279
293
 
280
- def set_participants(self, participants: Dict[str, str]):
281
- for name, role in participants.items():
282
- if role not in ["user", "assistant", "system"]:
283
- raise ValueError(f"Invalid role '{role}' for participant '{name}'")
284
- self.participants = participants
285
- self.touch()
294
+ def commit(self):
295
+ if not self._is_db_backed or not self._session:
296
+ return
297
+ if self._messages_to_delete_from_db:
298
+ for msg_id in self._messages_to_delete_from_db:
299
+ msg_to_del = self._session.get(self.db_manager.MessageModel, msg_id)
300
+ if msg_to_del:
301
+ self._session.delete(msg_to_del)
302
+ self._messages_to_delete_from_db.clear()
303
+ try:
304
+ self._session.commit()
305
+ self._rebuild_message_index()
306
+ except Exception as e:
307
+ self._session.rollback()
308
+ raise e
286
309
 
287
- def add_message(self, **kwargs) -> Dict:
288
- msg_id = kwargs.get('id', str(uuid.uuid4()))
289
- parent_id = kwargs.get('parent_id', self.active_branch_id or None)
290
-
291
- message_data = {
292
- 'id': msg_id, 'parent_id': parent_id,
293
- 'discussion_id': self.id, 'created_at': datetime.utcnow(),
294
- **kwargs
295
- }
296
-
297
- self.messages.append(message_data)
298
- self.message_index[msg_id] = message_data
299
- self.active_branch_id = msg_id
310
+ def close(self):
311
+ if self._session:
312
+ self.commit()
313
+ self._session.close()
314
+
315
+ def add_message(self, **kwargs) -> LollmsMessage:
316
+ msg_id, parent_id = kwargs.get('id', str(uuid.uuid4())), kwargs.get('parent_id', self.active_branch_id)
317
+ message_data = {'id': msg_id, 'parent_id': parent_id, 'discussion_id': self.id, 'created_at': datetime.utcnow(), **kwargs}
318
+ if 'metadata' in message_data:
319
+ message_data['message_metadata'] = message_data.pop('metadata')
320
+ if self._is_db_backed:
321
+ valid_keys = {c.name for c in self.db_manager.MessageModel.__table__.columns}
322
+ filtered_data = {k: v for k, v in message_data.items() if k in valid_keys}
323
+ new_msg_orm = self.db_manager.MessageModel(**filtered_data)
324
+ self._db_discussion.messages.append(new_msg_orm)
325
+ if new_msg_orm not in self._session:
326
+ self._session.add(new_msg_orm)
327
+ else:
328
+ new_msg_orm = SimpleNamespace(**message_data)
329
+ self._db_discussion.messages.append(new_msg_orm)
330
+ self._message_index[msg_id], self.active_branch_id = new_msg_orm, msg_id
300
331
  self.touch()
301
- return message_data
302
-
303
- def get_branch(self, leaf_id: Optional[str]) -> List[Dict]:
304
- if not leaf_id: return []
305
- branch = []
306
- current_id: Optional[str] = leaf_id
307
- while current_id and current_id in self.message_index:
308
- msg = self.message_index[current_id]
309
- branch.append(msg)
310
- current_id = msg.get('parent_id')
311
- return list(reversed(branch))
332
+ return LollmsMessage(self, new_msg_orm)
312
333
 
313
- def chat(self, user_message: str, show_thoughts: bool = False, **kwargs) -> Dict:
334
+ def get_branch(self, leaf_id: Optional[str]) -> List[LollmsMessage]:
335
+ if not leaf_id:
336
+ return []
337
+ branch_orms, current_id = [], leaf_id
338
+ while current_id and current_id in self._message_index:
339
+ msg_orm = self._message_index[current_id]
340
+ branch_orms.append(msg_orm)
341
+ current_id = msg_orm.parent_id
342
+ return [LollmsMessage(self, orm) for orm in reversed(branch_orms)]
343
+
344
+ def chat(self, user_message: str, personality: Optional['LollmsPersonality'] = None, **kwargs) -> LollmsMessage:
314
345
  if self.max_context_size is not None:
315
346
  self.summarize_and_prune(self.max_context_size)
316
-
347
+
317
348
  if user_message:
318
349
  self.add_message(sender="user", sender_type="user", content=user_message)
319
350
 
351
+ rag_context = None
352
+ original_system_prompt = self.system_prompt
353
+ if personality:
354
+ self.system_prompt = personality.system_prompt
355
+ if user_message:
356
+ rag_context = personality.get_rag_context(user_message)
357
+
358
+ if rag_context:
359
+ self.system_prompt = f"{original_system_prompt or ''}\n\n--- Relevant Information ---\n{rag_context}\n---"
360
+
320
361
  from lollms_client.lollms_types import MSG_TYPE
362
+ is_streaming = "streaming_callback" in kwargs and kwargs.get("streaming_callback") is not None
321
363
 
322
- is_streaming = "streaming_callback" in kwargs and kwargs["streaming_callback"] is not None
323
-
324
- if is_streaming:
325
- full_response_parts = []
326
- token_buffer = ""
327
- in_thought_block = False
328
- original_callback = kwargs.get("streaming_callback")
329
-
330
- def accumulating_callback(token: str, msg_type: MSG_TYPE = MSG_TYPE.MSG_TYPE_CHUNK):
331
- nonlocal token_buffer, in_thought_block
332
- continue_streaming = True
333
-
334
- if token: token_buffer += token
335
-
336
- while True:
337
- if in_thought_block:
338
- end_tag_pos = token_buffer.find("</think>")
339
- if end_tag_pos != -1:
340
- thought_chunk = token_buffer[:end_tag_pos]
341
- if show_thoughts and original_callback and thought_chunk:
342
- if not original_callback(thought_chunk, MSG_TYPE.MSG_TYPE_THOUGHT_CHUNK): continue_streaming = False
343
- in_thought_block = False
344
- token_buffer = token_buffer[end_tag_pos + len("</think>"):]
345
- else:
346
- if show_thoughts and original_callback and token_buffer:
347
- if not original_callback(token_buffer, MSG_TYPE.MSG_TYPE_THOUGHT_CHUNK): continue_streaming = False
348
- token_buffer = ""
349
- break
350
- else:
351
- start_tag_pos = token_buffer.find("<think>")
352
- if start_tag_pos != -1:
353
- response_chunk = token_buffer[:start_tag_pos]
354
- if response_chunk:
355
- full_response_parts.append(response_chunk)
356
- if original_callback:
357
- if not original_callback(response_chunk, MSG_TYPE.MSG_TYPE_CHUNK): continue_streaming = False
358
- in_thought_block = True
359
- token_buffer = token_buffer[start_tag_pos + len("<think>"):]
360
- else:
361
- if token_buffer:
362
- full_response_parts.append(token_buffer)
363
- if original_callback:
364
- if not original_callback(token_buffer, MSG_TYPE.MSG_TYPE_CHUNK): continue_streaming = False
365
- token_buffer = ""
366
- break
367
- return continue_streaming
368
-
369
- kwargs["streaming_callback"] = accumulating_callback
370
- kwargs["stream"] = True
371
-
372
- self.lollmsClient.chat(self, **kwargs)
373
- ai_response = "".join(full_response_parts)
364
+ final_raw_response = ""
365
+ start_time = datetime.now()
366
+
367
+ if personality and personality.script_module and hasattr(personality.script_module, 'run'):
368
+ try:
369
+ print(f"[{personality.name}] Running custom script...")
370
+ final_raw_response = personality.script_module.run(self, kwargs.get("streaming_callback"))
371
+ except Exception as e:
372
+ print(f"[{personality.name}] Error in custom script: {e}")
373
+ final_raw_response = f"Error executing personality script: {e}"
374
374
  else:
375
- kwargs["stream"] = False
376
- raw_response = self.lollmsClient.chat(self, **kwargs)
377
- ai_response = self.lollmsClient.remove_thinking_blocks(raw_response) if raw_response else ""
375
+ raw_response_accumulator = []
376
+ if is_streaming:
377
+ full_response_parts, token_buffer, in_thought_block = [], "", False
378
+ original_callback = kwargs.get("streaming_callback")
379
+ def accumulating_callback(token: str, msg_type: MSG_TYPE = MSG_TYPE.MSG_TYPE_CHUNK):
380
+ nonlocal token_buffer, in_thought_block
381
+ raw_response_accumulator.append(token)
382
+ continue_streaming = True
383
+ if token: token_buffer += token
384
+ while True:
385
+ if in_thought_block:
386
+ end_tag_pos = token_buffer.find("</think>")
387
+ if end_tag_pos != -1:
388
+ thought_chunk = token_buffer[:end_tag_pos]
389
+ if self.show_thoughts and original_callback and thought_chunk:
390
+ if not original_callback(thought_chunk, MSG_TYPE.MSG_TYPE_THOUGHT_CHUNK): continue_streaming = False
391
+ in_thought_block, token_buffer = False, token_buffer[end_tag_pos + len("</think>"):]
392
+ else:
393
+ if self.show_thoughts and original_callback and token_buffer:
394
+ if not original_callback(token_buffer, MSG_TYPE.MSG_TYPE_THOUGHT_CHUNK): continue_streaming = False
395
+ token_buffer = ""; break
396
+ else:
397
+ start_tag_pos = token_buffer.find("<think>")
398
+ if start_tag_pos != -1:
399
+ response_chunk = token_buffer[:start_tag_pos]
400
+ if response_chunk:
401
+ full_response_parts.append(response_chunk)
402
+ if original_callback:
403
+ if not original_callback(response_chunk, MSG_TYPE.MSG_TYPE_CHUNK): continue_streaming = False
404
+ in_thought_block, token_buffer = True, token_buffer[start_tag_pos + len("<think>"):]
405
+ else:
406
+ if token_buffer:
407
+ full_response_parts.append(token_buffer)
408
+ if original_callback:
409
+ if not original_callback(token_buffer, MSG_TYPE.MSG_TYPE_CHUNK): continue_streaming = False
410
+ token_buffer = ""; break
411
+ return continue_streaming
412
+ kwargs["streaming_callback"], kwargs["stream"] = accumulating_callback, True
413
+ self.lollmsClient.chat(self, **kwargs)
414
+ final_raw_response = "".join(raw_response_accumulator)
415
+ else:
416
+ kwargs["stream"] = False
417
+ final_raw_response = self.lollmsClient.chat(self, **kwargs) or ""
378
418
 
379
- ai_message_obj = self.add_message(sender="assistant", sender_type="assistant", content=ai_response)
419
+ end_time = datetime.now()
420
+ if rag_context:
421
+ self.system_prompt = original_system_prompt
422
+
423
+ duration = (end_time - start_time).total_seconds()
424
+ thoughts_match = re.search(r"<think>(.*?)</think>", final_raw_response, re.DOTALL)
425
+ thoughts_text = thoughts_match.group(1).strip() if thoughts_match else None
426
+ final_content = self.lollmsClient.remove_thinking_blocks(final_raw_response)
427
+ token_count = self.lollmsClient.count_tokens(final_content)
428
+ tok_per_sec = (token_count / duration) if duration > 0 else 0
429
+
430
+ ai_message_obj = self.add_message(
431
+ sender="assistant", sender_type="assistant", content=final_content,
432
+ raw_content=final_raw_response, thoughts=thoughts_text, tokens=token_count,
433
+ binding_name=self.lollmsClient.binding.binding_name, model_name=self.lollmsClient.binding.model_name,
434
+ generation_speed=tok_per_sec
435
+ )
380
436
 
381
437
  if self._is_db_backed and not self.autosave:
382
438
  self.commit()
383
-
384
439
  return ai_message_obj
385
440
 
386
- def regenerate_branch(self, show_thoughts: bool = False, **kwargs) -> Dict:
387
- last_message = self.message_index.get(self.active_branch_id)
388
- if not last_message or last_message['sender_type'] != 'assistant':
441
def process_and_summarize(self, large_text: str, user_prompt: str, chunk_size: int = 4096, **kwargs) -> "LollmsMessage":
    """Answer *user_prompt* about *large_text* via sequential chunk summarization.

    The text is split into fixed-size character chunks; a running summary is
    refined chunk-by-chunk through the LLM, then a final answer is generated
    from the accumulated summary. Both the user prompt and the final answer
    are recorded as messages in the discussion.

    Args:
        large_text: The (possibly very large) document to process.
        user_prompt: The question the summary should answer.
        chunk_size: Characters per chunk (not tokens — TODO confirm intent).
        **kwargs: Forwarded to ``lollmsClient.generate_text``.

    Returns:
        The assistant message object holding the final answer; the running
        summary is stored in its ``scratchpad``.
    """
    user_msg = self.add_message(sender="user", sender_type="user", content=user_prompt)
    # Naive character-based chunking; chunk boundaries may split words/sentences.
    chunks = [large_text[i:i + chunk_size] for i in range(0, len(large_text), chunk_size)]
    current_summary, total_chunks = "", len(chunks)
    for i, chunk in enumerate(chunks):
        print(f"\nProcessing chunk {i+1}/{total_chunks}...")
        if i == 0:
            # First chunk: create the initial summary from scratch.
            prompt = f"""The user wants to know: "{user_prompt}"\nHere is the first part of the document (chunk 1 of {total_chunks}). \nRead it and create a detailed summary of all information relevant to the user's prompt.\n\nDOCUMENT CHUNK:\n---\n{chunk}\n---\nSUMMARY:"""
        else:
            # Subsequent chunks: fold new information into the running summary.
            prompt = f"""The user wants to know: "{user_prompt}"\nYou are processing a large document sequentially. Here is the summary of the previous chunks and the content of the next chunk ({i+1} of {total_chunks}).\nUpdate your summary by integrating new relevant information from the new chunk. Do not repeat information you already have. Output ONLY the new, updated, complete summary.\n\nPREVIOUS SUMMARY:\n---\n{current_summary}\n---\n\nNEW DOCUMENT CHUNK:\n---\n{chunk}\n---\nUPDATED SUMMARY:"""
        current_summary = self.lollmsClient.generate_text(prompt, **kwargs).strip()
    # Produce the final answer from the complete accumulated summary.
    final_prompt = f"""Based on the following comprehensive summary of a document, provide a final answer to the user's original prompt.\nUser's prompt: "{user_prompt}"\n\nCOMPREHENSIVE SUMMARY:\n---\n{current_summary}\n---\nFINAL ANSWER:"""
    final_answer = self.lollmsClient.generate_text(final_prompt, **kwargs).strip()
    # The intermediate summary is preserved on the message for traceability.
    ai_message_obj = self.add_message(
        sender="assistant", sender_type="assistant", content=final_answer,
        scratchpad=current_summary, parent_id=user_msg.id
    )
    if self._is_db_backed and not self.autosave:
        self.commit()
    return ai_message_obj
462
def regenerate_branch(self, **kwargs) -> "LollmsMessage":
    """Discard the current assistant reply and generate a fresh one.

    The message at the active branch tip must be an assistant message; it is
    removed from memory (and queued for DB deletion when database-backed),
    the branch tip moves back to its parent, and ``chat`` is re-invoked with
    an empty prompt to produce a replacement reply.

    Raises:
        ValueError: If there is no active message, or the tip is not an
            assistant message.

    Returns:
        The newly generated assistant message object.
    """
    tip_id = self.active_branch_id
    if not tip_id or tip_id not in self._message_index:
        raise ValueError("No active message to regenerate from.")
    stale_reply = self._message_index[tip_id]
    if stale_reply.sender_type != 'assistant':
        raise ValueError("Can only regenerate from an assistant's message.")
    parent_id = stale_reply.parent_id
    stale_id = stale_reply.id
    # Drop the old reply from the in-memory structures; DB-backed discussions
    # defer the actual row deletion until the next commit.
    self._db_discussion.messages.remove(stale_reply)
    self._message_index.pop(stale_id)
    if self._is_db_backed:
        self._messages_to_delete_from_db.add(stale_id)
    self.active_branch_id = parent_id
    self.touch()
    return self.chat("", **kwargs)
477
def delete_branch(self, message_id: str):
    """Delete a message (and, via DB cascade, its descendants) from the discussion.

    Only available for database-backed discussions. The active branch tip is
    moved to the deleted message's parent before the row is removed, and the
    deletion is committed immediately.

    Args:
        message_id: Id of the message at the root of the branch to delete.

    Raises:
        NotImplementedError: If the discussion is not database-backed.
        ValueError: If *message_id* is unknown to this discussion.
    """
    if not self._is_db_backed:
        raise NotImplementedError("Branch deletion is only supported for database-backed discussions.")
    if message_id not in self._message_index:
        raise ValueError("Message not found.")
    msg_to_delete = self._session.query(self.db_manager.MessageModel).filter_by(id=message_id).first()
    if msg_to_delete:
        # Re-point the active branch before the row disappears.
        self.active_branch_id = msg_to_delete.parent_id
        self._session.delete(msg_to_delete)
    self.commit()
    # Fix: resync the in-memory index after the delete; otherwise it still
    # references the removed message (and any children deleted by cascade).
    # The previous implementation reloaded the whole discussion from the DB.
    self._rebuild_message_index()
488
def switch_to_branch(self, message_id: str):
    """Make *message_id* the tip of the active conversation branch.

    Raises:
        ValueError: If the id is not present in this discussion.
    """
    known_messages = self._message_index
    if message_id in known_messages:
        self.active_branch_id = message_id
        # Mark the discussion as modified (persists the new tip when saved).
        self.touch()
    else:
        raise ValueError(f"Message ID '{message_id}' not found in the current discussion.")
494
def format_discussion(self, max_allowed_tokens: int, branch_tip_id: Optional[str] = None) -> str:
    """Render the active (or given) branch as lollms_text within a token budget.

    Thin convenience wrapper around ``export("lollms_text", ...)``.
    """
    rendered = self.export("lollms_text", branch_tip_id, max_allowed_tokens)
    return rendered
497
  def _get_full_system_prompt(self) -> Optional[str]:
428
- full_sys_prompt_parts = []
498
+ parts = []
429
499
  if self.scratchpad:
430
- full_sys_prompt_parts.append("--- KNOWLEDGE SCRATCHPAD ---")
431
- full_sys_prompt_parts.append(self.scratchpad.strip())
432
- full_sys_prompt_parts.append("--- END SCRATCHPAD ---")
433
-
500
+ parts.extend(["--- KNOWLEDGE SCRATCHPAD ---", self.scratchpad.strip(), "--- END SCRATCHPAD ---"])
434
501
  if self.system_prompt and self.system_prompt.strip():
435
- full_sys_prompt_parts.append(self.system_prompt.strip())
436
-
437
- return "\n\n".join(full_sys_prompt_parts) if full_sys_prompt_parts else None
502
+ parts.append(self.system_prompt.strip())
503
+ return "\n\n".join(parts) if parts else None
438
504
 
439
505
def export(self, format_type: str, branch_tip_id: Optional[str] = None, max_allowed_tokens: Optional[int] = None) -> Union[List[Dict], str]:
    """Export the branch ending at *branch_tip_id* in a chat-API format.

    Args:
        format_type: One of "lollms_text" (single prompt string),
            "openai_chat" or "ollama_chat" (lists of role/content dicts).
        branch_tip_id: Branch tip to export; defaults to the active branch.
        max_allowed_tokens: Token budget, enforced only for "lollms_text".

    Returns:
        A prompt string for "lollms_text", otherwise a list of message dicts.

    Raises:
        ValueError: For an unsupported *format_type* (only reached when the
            discussion is non-empty, since the empty-discussion early return
            above covers the three known formats).
    """
    branch_tip_id = branch_tip_id or self.active_branch_id
    if not branch_tip_id and format_type in ["lollms_text", "openai_chat", "ollama_chat"]:
        return "" if format_type == "lollms_text" else []
    branch, full_system_prompt, participants = self.get_branch(branch_tip_id), self._get_full_system_prompt(), self.participants or {}

    def get_full_content(msg: "LollmsMessage") -> str:
        # Build the exported text for one message: optionally re-inject the
        # raw <think> blocks (or a placeholder for them), and prepend the
        # message's scratchpad if present.
        content_to_use = msg.content
        if self.include_thoughts_in_context and msg.sender_type == 'assistant' and msg.raw_content:
            if self.thought_placeholder:
                # Replace each thought body with the configured placeholder.
                content_to_use = re.sub(r"<think>.*?</think>", f"<think>{self.thought_placeholder}</think>", msg.raw_content, flags=re.DOTALL)
            else:
                content_to_use = msg.raw_content

        parts = [f"--- Internal Scratchpad ---\n{msg.scratchpad.strip()}\n---"] if msg.scratchpad and msg.scratchpad.strip() else []
        parts.append(content_to_use.strip())
        return "\n".join(parts)

    if format_type == "lollms_text":
        prompt_parts, current_tokens = [], 0
        if full_system_prompt:
            sys_msg_text = f"!@>system:\n{full_system_prompt}\n"
            sys_tokens = self.lollmsClient.count_tokens(sys_msg_text)
            # The system prompt is only kept if it fits the budget on its own.
            if max_allowed_tokens is None or sys_tokens <= max_allowed_tokens:
                prompt_parts.append(sys_msg_text)
                current_tokens += sys_tokens
        # Walk from the branch tip backwards so the most recent messages are
        # kept when the budget runs out; each kept message is inserted after
        # the system prompt to restore chronological order.
        for msg in reversed(branch):
            sender_str = msg.sender.replace(':', '').replace('!@>', '')
            content = get_full_content(msg)
            if msg.images:
                content += f"\n({len(msg.images)} image(s) attached)"
            msg_text = f"!@>{sender_str}:\n{content}\n"
            msg_tokens = self.lollmsClient.count_tokens(msg_text)
            if max_allowed_tokens is not None and current_tokens + msg_tokens > max_allowed_tokens:
                break
            prompt_parts.insert(1 if full_system_prompt else 0, msg_text)
            current_tokens += msg_tokens
        return "".join(prompt_parts).strip()

    # Chat-style formats: no token budget is applied here.
    messages = []
    if full_system_prompt:
        messages.append({"role": "system", "content": full_system_prompt})
    for msg in branch:
        # Unknown senders default to the "user" role.
        role, content, images = participants.get(msg.sender, "user"), get_full_content(msg), msg.images or []
        if format_type == "openai_chat":
            if images:
                # OpenAI multimodal shape: a list of typed content parts.
                content_parts = [{"type": "text", "text": content}] if content else []
                for img in images:
                    content_parts.append({"type": "image_url", "image_url": {"url": img['data'] if img['type'] == 'url' else f"data:image/jpeg;base64,{img['data']}", "detail": "auto"}})
                messages.append({"role": role, "content": content_parts})
            else:
                messages.append({"role": role, "content": content})
        elif format_type == "ollama_chat":
            message_dict = {"role": role, "content": content}
            # Ollama only accepts base64 images; URL images are dropped.
            base64_images = [img['data'] for img in images if img['type'] == 'base64']
            if base64_images:
                message_dict["images"] = base64_images
            messages.append(message_dict)
        else:
            raise ValueError(f"Unsupported export format_type: {format_type}")
    return messages
567
def summarize_and_prune(self, max_tokens: int, preserve_last_n: int = 4):
    """Shrink an over-budget discussion by summarizing its oldest messages.

    If the formatted active branch exceeds *max_tokens*, all but the last
    *preserve_last_n* messages are summarized via the LLM, the summary is
    appended to the discussion scratchpad, and the summarized messages are
    removed (and queued for DB deletion when database-backed). No-op when
    there is no active branch, the discussion fits the budget, or too few
    messages exist to prune. Summary-generation failures are logged and
    leave the discussion untouched.

    Args:
        max_tokens: Token budget for the formatted discussion.
        preserve_last_n: Number of most recent messages to keep verbatim.
    """
    branch_tip_id = self.active_branch_id
    if not branch_tip_id:
        return
    current_tokens = self.lollmsClient.count_tokens(self.format_discussion(999999, branch_tip_id))
    if current_tokens <= max_tokens:
        return
    branch = self.get_branch(branch_tip_id)
    if len(branch) <= preserve_last_n:
        return
    messages_to_prune = branch[:-preserve_last_n]
    text_to_summarize = "\n\n".join([f"{m.sender}: {m.content}" for m in messages_to_prune])
    summary_prompt = f"Concisely summarize this conversation excerpt:\n---\n{text_to_summarize}\n---\nSUMMARY:"
    try:
        summary = self.lollmsClient.generate_text(summary_prompt, n_predict=300, temperature=0.1)
    except Exception as e:
        # Best-effort: a failed summary must not destroy the conversation.
        print(f"\n[WARNING] Pruning failed, couldn't generate summary: {e}")
        return
    self.scratchpad = f"{self.scratchpad}\n\n--- Summary of earlier conversation ---\n{summary.strip()}".strip()
    pruned_ids = {msg.id for msg in messages_to_prune}
    # Fix: the previous if/else duplicated the identical message-filtering
    # statement in both branches; only the DB deletion queue is conditional.
    if self._is_db_backed:
        self._messages_to_delete_from_db.update(pruned_ids)
    self._db_discussion.messages = [m for m in self._db_discussion.messages if m.id not in pruned_ids]
    self._rebuild_message_index()
    self.touch()
    print(f"\n[INFO] Discussion auto-pruned. {len(messages_to_prune)} messages summarized.")
596
def to_dict(self):
    """Serialize the discussion, including all messages, into a plain dict.

    Datetimes are rendered as ISO-8601 strings; missing discussion timestamps
    become ``None``. The result round-trips through ``load_from_dict``.
    """
    def _message_as_dict(message):
        # Flatten one message object into JSON-friendly primitives.
        return {
            'id': message.id,
            'parent_id': message.parent_id,
            'discussion_id': message.discussion_id,
            'sender': message.sender,
            'sender_type': message.sender_type,
            'content': message.content,
            'scratchpad': message.scratchpad,
            'images': message.images,
            'created_at': message.created_at.isoformat(),
            'metadata': message.metadata,
        }

    created = self.created_at.isoformat() if self.created_at else None
    updated = self.updated_at.isoformat() if self.updated_at else None
    return {
        "id": self.id,
        "system_prompt": self.system_prompt,
        "participants": self.participants,
        "active_branch_id": self.active_branch_id,
        "metadata": self.metadata,
        "scratchpad": self.scratchpad,
        "messages": [_message_as_dict(m) for m in self.messages],
        "created_at": created,
        "updated_at": updated,
    }
607
def load_from_dict(self, data: Dict):
    """Populate this discussion from a ``to_dict``-style dictionary.

    Resets the discussion to a fresh in-memory proxy, restores the scalar
    fields, then re-adds every message via ``add_message``. Message
    ``created_at`` ISO strings are parsed; unparsable ones fall back to the
    current UTC time.
    """
    # NOTE(review): data.get("id") may be None when the key is absent; assumes
    # _create_in_memory_proxy generates an id in that case — confirm (the old
    # implementation defaulted to a fresh uuid4 here).
    self._create_in_memory_proxy(id=data.get("id"))
    self.system_prompt, self.participants = data.get("system_prompt"), data.get("participants", {})
    self.active_branch_id, self.metadata = data.get("active_branch_id"), data.get("metadata", {})
    self.scratchpad = data.get("scratchpad", "")
    for msg_data in data.get("messages", []):
        if 'created_at' in msg_data and isinstance(msg_data['created_at'], str):
            try:
                msg_data['created_at'] = datetime.fromisoformat(msg_data['created_at'])
            except ValueError:
                # Unparsable timestamp: fall back to "now" rather than failing.
                msg_data['created_at'] = datetime.utcnow()
        # NOTE(review): forwards every serialized key (id, parent_id,
        # discussion_id, metadata, ...) — assumes add_message accepts them all.
        self.add_message(**msg_data)
    self.created_at = datetime.fromisoformat(data['created_at']) if data.get('created_at') else datetime.utcnow()
    self.updated_at = datetime.fromisoformat(data['updated_at']) if data.get('updated_at') else self.created_at
622
@staticmethod
def migrate(lollms_client: 'LollmsClient', db_manager: "LollmsDataManager", folder_path: Union[str, Path]):
    """Import legacy JSON/YAML discussion files from *folder_path* into the DB.

    Each ``*.json`` / ``*.yaml`` file in the folder becomes one discussion row
    plus its message rows. Discussions whose id already exists are skipped;
    per-file failures are reported and rolled back without stopping the run.

    Args:
        lollms_client: Unused here; kept for interface compatibility.
        db_manager: Provides the session and the Discussion/Message models.
        folder_path: Directory containing the exported discussion files.
    """
    folder = Path(folder_path)
    if not folder.is_dir():
        print(f"Error: Path '{folder}' is not a valid directory.")
        return
    print(f"\n--- Starting Migration from '{folder}' ---")
    files = list(folder.glob("*.json")) + list(folder.glob("*.yaml"))
    with db_manager.get_session() as session:
        # Only columns that actually exist on the models are migrated; any
        # extra keys in the legacy files are silently dropped.
        valid_disc_keys = {c.name for c in db_manager.DiscussionModel.__table__.columns}
        valid_msg_keys = {c.name for c in db_manager.MessageModel.__table__.columns}
        for i, file_path in enumerate(files):
            print(f"Migrating file {i+1}/{len(files)}: {file_path.name} ... ", end="")
            try:
                # yaml.safe_load also parses the .json files (YAML superset).
                data = yaml.safe_load(file_path.read_text(encoding='utf-8'))
                discussion_id = data.get("id", str(uuid.uuid4()))
                if session.query(db_manager.DiscussionModel).filter_by(id=discussion_id).first():
                    print("SKIPPED (already exists)")
                    continue
                discussion_data = data.copy()
                # Legacy files use 'metadata'; the ORM column is
                # 'discussion_metadata' ('message_metadata' on messages).
                if 'metadata' in discussion_data:
                    discussion_data['discussion_metadata'] = discussion_data.pop('metadata')
                for key in ['created_at', 'updated_at']:
                    if key in discussion_data and isinstance(discussion_data[key], str):
                        discussion_data[key] = datetime.fromisoformat(discussion_data[key])
                db_discussion = db_manager.DiscussionModel(**{k: v for k, v in discussion_data.items() if k in valid_disc_keys})
                session.add(db_discussion)
                for msg_data in data.get("messages", []):
                    msg_data['discussion_id'] = db_discussion.id
                    if 'metadata' in msg_data:
                        msg_data['message_metadata'] = msg_data.pop('metadata')
                    if 'created_at' in msg_data and isinstance(msg_data['created_at'], str):
                        msg_data['created_at'] = datetime.fromisoformat(msg_data['created_at'])
                    msg_orm = db_manager.MessageModel(**{k: v for k, v in msg_data.items() if k in valid_msg_keys})
                    session.add(msg_orm)
                # Flush now so constraint violations surface for THIS file and
                # can be rolled back individually.
                session.flush()
                print("OK")
            except Exception as e:
                print(f"FAILED. Error: {e}")
                session.rollback()
                continue
        session.commit()
        print("--- Migration Finished ---")