memorisdk 1.0.1__py3-none-any.whl → 2.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of memorisdk might be problematic. Click here for more details.

Files changed (46)
  1. memori/__init__.py +24 -8
  2. memori/agents/conscious_agent.py +252 -414
  3. memori/agents/memory_agent.py +487 -224
  4. memori/agents/retrieval_agent.py +416 -60
  5. memori/config/memory_manager.py +323 -0
  6. memori/core/conversation.py +393 -0
  7. memori/core/database.py +386 -371
  8. memori/core/memory.py +1676 -534
  9. memori/core/providers.py +217 -0
  10. memori/database/adapters/__init__.py +10 -0
  11. memori/database/adapters/mysql_adapter.py +331 -0
  12. memori/database/adapters/postgresql_adapter.py +291 -0
  13. memori/database/adapters/sqlite_adapter.py +229 -0
  14. memori/database/auto_creator.py +320 -0
  15. memori/database/connection_utils.py +207 -0
  16. memori/database/connectors/base_connector.py +283 -0
  17. memori/database/connectors/mysql_connector.py +240 -18
  18. memori/database/connectors/postgres_connector.py +277 -4
  19. memori/database/connectors/sqlite_connector.py +178 -3
  20. memori/database/models.py +400 -0
  21. memori/database/queries/base_queries.py +1 -1
  22. memori/database/queries/memory_queries.py +91 -2
  23. memori/database/query_translator.py +222 -0
  24. memori/database/schema_generators/__init__.py +7 -0
  25. memori/database/schema_generators/mysql_schema_generator.py +215 -0
  26. memori/database/search/__init__.py +8 -0
  27. memori/database/search/mysql_search_adapter.py +255 -0
  28. memori/database/search/sqlite_search_adapter.py +180 -0
  29. memori/database/search_service.py +548 -0
  30. memori/database/sqlalchemy_manager.py +839 -0
  31. memori/integrations/__init__.py +36 -11
  32. memori/integrations/litellm_integration.py +340 -6
  33. memori/integrations/openai_integration.py +506 -240
  34. memori/utils/input_validator.py +395 -0
  35. memori/utils/pydantic_models.py +138 -36
  36. memori/utils/query_builder.py +530 -0
  37. memori/utils/security_audit.py +594 -0
  38. memori/utils/security_integration.py +339 -0
  39. memori/utils/transaction_manager.py +547 -0
  40. {memorisdk-1.0.1.dist-info → memorisdk-2.0.0.dist-info}/METADATA +144 -34
  41. memorisdk-2.0.0.dist-info/RECORD +67 -0
  42. memorisdk-1.0.1.dist-info/RECORD +0 -44
  43. memorisdk-1.0.1.dist-info/entry_points.txt +0 -2
  44. {memorisdk-1.0.1.dist-info → memorisdk-2.0.0.dist-info}/WHEEL +0 -0
  45. {memorisdk-1.0.1.dist-info → memorisdk-2.0.0.dist-info}/licenses/LICENSE +0 -0
  46. {memorisdk-1.0.1.dist-info → memorisdk-2.0.0.dist-info}/top_level.txt +0 -0
@@ -1,506 +1,344 @@
1
1
  """
2
- Conscious Agent for Background Memory Processing
2
+ Conscious Agent for User Context Management
3
3
 
4
- This agent analyzes long-term memory patterns to extract essential personal facts
5
- and promote them to short-term memory for immediate context injection.
4
+ This agent copies conscious-info labeled memories from long-term memory
5
+ directly to short-term memory for immediate context availability.
6
6
  """
7
7
 
8
8
  import json
9
- import os
10
- import uuid
11
- from datetime import datetime, timedelta
12
- from typing import Any, Dict, List, Optional
9
+ from datetime import datetime
10
+ from typing import List
13
11
 
14
12
  from loguru import logger
15
- from openai import AsyncOpenAI
16
- from pydantic import BaseModel, Field
17
-
18
-
19
- class EssentialMemory(BaseModel):
20
- """Essential conversation memory identified for short-term storage"""
21
-
22
- memory_id: str = Field(description="Original memory ID from long-term storage")
23
- summary: str = Field(description="Summary of the conversation")
24
- category: str = Field(description="Memory category")
25
- importance_score: float = Field(ge=0.0, le=1.0, description="Importance score")
26
- frequency_score: float = Field(
27
- ge=0.0, le=1.0, description="How frequently this is referenced"
28
- )
29
- recency_score: float = Field(
30
- ge=0.0, le=1.0, description="How recent this information is"
31
- )
32
- relevance_reasoning: str = Field(description="Why this memory is essential")
33
-
34
-
35
- class EssentialMemoriesAnalysis(BaseModel):
36
- """Analysis result containing essential memories to promote to short-term"""
37
-
38
- essential_memories: List[EssentialMemory] = Field(
39
- default_factory=list,
40
- description="Conversations that should be promoted to short-term memory",
41
- )
42
- analysis_reasoning: str = Field(
43
- description="Overall reasoning for memory selection"
44
- )
45
- total_analyzed: int = Field(description="Total memories analyzed")
46
- promoted_count: int = Field(
47
- description="Number of memories recommended for promotion"
48
- )
49
13
 
50
14
 
51
15
  class ConsciouscAgent:
52
16
  """
53
- Background agent that analyzes long-term memory to extract essential personal facts.
17
+ Agent that copies conscious-info labeled memories from long-term memory
18
+ directly to short-term memory for immediate context availability.
54
19
 
55
- This agent mimics the conscious mind's ability to keep essential information
56
- readily accessible in short-term memory.
20
+ Runs once at program startup when conscious_ingest=True.
57
21
  """
58
22
 
59
- def __init__(self, api_key: Optional[str] = None, model: str = "gpt-4o"):
23
+ def __init__(self):
24
+ """Initialize the conscious agent"""
25
+ self.context_initialized = False
26
+
27
+ async def run_conscious_ingest(
28
+ self, db_manager, namespace: str = "default"
29
+ ) -> bool:
60
30
  """
61
- Initialize the conscious agent
31
+ Run conscious context ingestion once at program startup
32
+
33
+ Copies all conscious-info labeled memories from long-term memory
34
+ directly to short-term memory as permanent context
62
35
 
63
36
  Args:
64
- api_key: OpenAI API key (if None, uses environment variable)
65
- model: OpenAI model to use for analysis (gpt-4o recommended)
37
+ db_manager: Database manager instance
38
+ namespace: Memory namespace
39
+
40
+ Returns:
41
+ True if memories were copied, False otherwise
66
42
  """
67
- self.api_key = api_key
68
- self.model = model
69
-
70
- # Check if API key is available (either provided or in environment)
71
- effective_api_key = api_key or os.getenv("OPENAI_API_KEY")
72
-
73
- if effective_api_key:
74
- self.client = AsyncOpenAI(
75
- api_key=api_key
76
- ) # AsyncOpenAI handles None api_key automatically
77
- else:
78
- self.client = None
79
- logger.warning(
80
- "ConsciouscAgent: No OpenAI API key found. Set OPENAI_API_KEY environment variable or provide api_key parameter."
43
+ try:
44
+ # Get all conscious-info labeled memories
45
+ conscious_memories = await self._get_conscious_memories(
46
+ db_manager, namespace
81
47
  )
82
48
 
83
- self.last_analysis = None
84
- self.analysis_interval = timedelta(hours=6) # Analyze every 6 hours
85
-
86
- # System prompt for memory selection
87
- self.system_prompt = """You are a Conscious Agent responsible for selecting essential conversations from long-term memory to promote to short-term memory.
88
-
89
- Your role is to identify the most important conversations that should be readily available for immediate context injection.
49
+ if not conscious_memories:
50
+ logger.info("ConsciouscAgent: No conscious-info memories found")
51
+ return False
90
52
 
91
- SELECTION CRITERIA:
53
+ # Copy each conscious-info memory directly to short-term memory
54
+ copied_count = 0
55
+ for memory_row in conscious_memories:
56
+ success = await self._copy_memory_to_short_term(
57
+ db_manager, namespace, memory_row
58
+ )
59
+ if success:
60
+ copied_count += 1
92
61
 
93
- 1. PERSONAL IDENTITY: Conversations where the user shares their name, occupation, location, or basic info
94
- 2. PREFERENCES & HABITS: Conversations revealing likes, dislikes, routines, sleep schedule, work patterns
95
- 3. SKILLS & EXPERTISE: Conversations about their technical skills, programming languages, tools they use
96
- 4. CURRENT PROJECTS: Conversations about ongoing work, projects, or learning goals
97
- 5. RELATIONSHIPS: Conversations mentioning important people, colleagues, or connections
98
- 6. REPEATED REFERENCES: Conversations that get referenced or built upon in later discussions
62
+ # Mark memories as processed
63
+ memory_ids = [
64
+ row[0] for row in conscious_memories
65
+ ] # memory_id is first column
66
+ await self._mark_memories_processed(db_manager, memory_ids, namespace)
99
67
 
100
- SCORING GUIDELINES:
101
- - **Frequency Score**: How often this information is referenced or mentioned again
102
- - **Recency Score**: How recent and relevant this information remains
103
- - **Importance Score**: How critical this information is for understanding the person
68
+ self.context_initialized = True
69
+ logger.info(
70
+ f"ConsciouscAgent: Copied {copied_count} conscious-info memories to short-term memory"
71
+ )
104
72
 
105
- SELECT conversations that:
106
- - Contain foundational information about the person (name, role, preferences)
107
- - Are frequently referenced or built upon in later conversations
108
- - Provide essential context for understanding future conversations
109
- - Represent stable, long-term characteristics rather than temporary states
73
+ return copied_count > 0
110
74
 
111
- AVOID conversations that:
112
- - Are purely transactional or generic
113
- - Contain outdated or superseded information
114
- - Are highly specific to a single context that hasn't been revisited"""
75
+ except Exception as e:
76
+ logger.error(f"ConsciouscAgent: Conscious ingest failed: {e}")
77
+ return False
115
78
 
116
- async def analyze_memory_patterns(
117
- self, db_manager, namespace: str = "default", min_memories: int = 10
118
- ) -> Optional[EssentialMemoriesAnalysis]:
79
+ async def initialize_existing_conscious_memories(
80
+ self, db_manager, namespace: str = "default"
81
+ ) -> bool:
119
82
  """
120
- Analyze long-term memory patterns to select essential conversations
83
+ Initialize by copying ALL existing conscious-info memories to short-term memory
84
+ This is called when both auto_ingest=True and conscious_ingest=True
85
+ to ensure essential conscious information is immediately available
121
86
 
122
87
  Args:
123
88
  db_manager: Database manager instance
124
- namespace: Memory namespace to analyze
125
- min_memories: Minimum number of memories needed for analysis
89
+ namespace: Memory namespace
126
90
 
127
91
  Returns:
128
- EssentialMemoriesAnalysis with selected conversations or None if insufficient data
92
+ True if memories were processed, False otherwise
129
93
  """
130
- if not self.client:
131
- logger.debug("ConsciouscAgent: No API client available, skipping analysis")
132
- return None
133
-
134
94
  try:
135
- # Get all long-term memories for analysis
136
- memories = await self._get_long_term_memories(db_manager, namespace)
95
+ from sqlalchemy import text
137
96
 
138
- if len(memories) < min_memories:
139
- logger.info(
140
- f"ConsciouscAgent: Insufficient memories ({len(memories)}) for analysis"
97
+ with db_manager._get_connection() as connection:
98
+ # Get ALL conscious-info labeled memories from long-term memory
99
+ cursor = connection.execute(
100
+ text(
101
+ """SELECT memory_id, processed_data, summary, searchable_content,
102
+ importance_score, created_at
103
+ FROM long_term_memory
104
+ WHERE namespace = :namespace AND classification = 'conscious-info'
105
+ ORDER BY importance_score DESC, created_at DESC"""
106
+ ),
107
+ {"namespace": namespace},
141
108
  )
142
- return None
143
-
144
- # Prepare memory data for analysis
145
- memory_summaries = []
146
- for memory in memories:
147
- try:
148
- processed_data = json.loads(memory.get("processed_data", "{}"))
149
- memory_summaries.append(
150
- {
151
- "memory_id": memory.get("memory_id", ""),
152
- "summary": memory.get("summary", ""),
153
- "category": memory.get("category_primary", ""),
154
- "created_at": memory.get("created_at", ""),
155
- "entities": processed_data.get("entities", {}),
156
- "importance": memory.get("importance_score", 0.0),
157
- "access_count": memory.get("access_count", 0),
158
- }
159
- )
160
- except json.JSONDecodeError:
161
- continue
109
+ existing_conscious_memories = cursor.fetchall()
162
110
 
163
- if not memory_summaries:
164
- logger.warning("ConsciouscAgent: No valid memories found for analysis")
165
- return None
111
+ if not existing_conscious_memories:
112
+ logger.debug(
113
+ "ConsciouscAgent: No existing conscious-info memories found for initialization"
114
+ )
115
+ return False
166
116
 
167
- # Perform AI analysis to select essential conversations
168
- analysis = await self._perform_memory_selection(memory_summaries)
117
+ copied_count = 0
118
+ for memory_row in existing_conscious_memories:
119
+ success = await self._copy_memory_to_short_term(
120
+ db_manager, namespace, memory_row
121
+ )
122
+ if success:
123
+ copied_count += 1
169
124
 
170
- if analysis:
171
- self.last_analysis = datetime.now()
125
+ if copied_count > 0:
172
126
  logger.info(
173
- f"ConsciouscAgent: Selected {len(analysis.essential_memories)} essential conversations"
127
+ f"ConsciouscAgent: Initialized {copied_count} existing conscious-info memories to short-term memory"
174
128
  )
175
-
176
- return analysis
177
-
178
- except Exception as e:
179
- logger.error(f"ConsciouscAgent: Memory analysis failed: {e}")
180
- return None
181
-
182
- async def _get_long_term_memories(
183
- self, db_manager, namespace: str
184
- ) -> List[Dict[str, Any]]:
185
- """Get long-term memories for analysis"""
186
- try:
187
- # Get memories from the last 30 days for pattern analysis
188
- cutoff_date = datetime.now() - timedelta(days=30)
189
-
190
- query = """
191
- SELECT memory_id, summary, category_primary, processed_data,
192
- importance_score, created_at, access_count
193
- FROM long_term_memory
194
- WHERE namespace = ? AND created_at >= ?
195
- ORDER BY importance_score DESC, access_count DESC
196
- LIMIT 100
197
- """
198
-
199
- # Execute query through database manager
200
- with db_manager._get_connection() as connection:
201
- cursor = connection.execute(query, (namespace, cutoff_date.isoformat()))
202
-
203
- memories = []
204
- for row in cursor.fetchall():
205
- memories.append(
206
- {
207
- "memory_id": row[0],
208
- "summary": row[1],
209
- "category_primary": row[2],
210
- "processed_data": row[3],
211
- "importance_score": row[4],
212
- "created_at": row[5],
213
- "access_count": row[6],
214
- }
215
- )
216
-
217
- return memories
129
+ return True
130
+ else:
131
+ logger.debug(
132
+ "ConsciouscAgent: No new conscious memories to initialize (all were duplicates)"
133
+ )
134
+ return False
218
135
 
219
136
  except Exception as e:
220
- logger.error(f"ConsciouscAgent: Failed to get long-term memories: {e}")
221
- return []
222
-
223
- async def _perform_memory_selection(
224
- self, memory_summaries: List[Dict]
225
- ) -> Optional[EssentialMemoriesAnalysis]:
226
- """Use AI to select essential conversations from memory patterns"""
227
- try:
228
- # Prepare context for AI analysis
229
- memory_context = self._prepare_memory_context(memory_summaries)
230
-
231
- # Create the analysis prompt
232
- user_prompt = f"""Analyze the following conversations from long-term memory and select the most essential ones to promote to short-term memory:
233
-
234
- AVAILABLE CONVERSATIONS:
235
- {memory_context}
236
-
237
- Select conversations that should be promoted to short-term memory for immediate context. Focus on conversations that:
238
- 1. Contain foundational personal information (name, occupation, preferences)
239
- 2. Are frequently referenced or built upon in later conversations
240
- 3. Provide essential context for understanding the person
241
- 4. Represent stable, long-term characteristics
242
-
243
- For each selected conversation, provide:
244
- - The memory_id
245
- - Frequency score (how often this info is referenced)
246
- - Recency score (how current/relevant this remains)
247
- - Importance score (how critical for understanding the person)
248
- - Clear reasoning for why this conversation is essential
249
-
250
- Limit selection to the top 5-10 most essential conversations."""
251
-
252
- # Make API call with structured output
253
- response = await self.client.beta.chat.completions.parse(
254
- model=self.model,
255
- messages=[
256
- {"role": "system", "content": self.system_prompt},
257
- {"role": "user", "content": user_prompt},
258
- ],
259
- response_format=EssentialMemoriesAnalysis,
260
- temperature=0.1,
137
+ logger.error(
138
+ f"ConsciouscAgent: Failed to initialize existing conscious memories: {e}"
261
139
  )
140
+ return False
262
141
 
263
- analysis = response.choices[0].message.parsed
264
- return analysis
265
-
266
- except Exception as e:
267
- logger.error(f"ConsciouscAgent: Memory selection failed: {e}")
268
- return None
269
-
270
- def _prepare_memory_context(self, memory_summaries: List[Dict]) -> str:
271
- """Prepare memory data for AI analysis"""
272
- context_lines = []
273
-
274
- for i, memory in enumerate(
275
- memory_summaries[:50], 1
276
- ): # Limit to 50 most important
277
- line = f"{i}. ID: {memory['memory_id']} | [{memory['category']}] {memory['summary']}"
278
- line += f" | Importance: {memory['importance']:.2f} | Access: {memory.get('access_count', 0)}"
279
-
280
- if memory.get("entities"):
281
- entities = []
282
- for _entity_type, values in memory["entities"].items():
283
- if values and isinstance(values, list):
284
- # Handle both string entities and structured entities
285
- for value in values:
286
- if isinstance(value, str):
287
- entities.append(value)
288
- elif isinstance(value, dict) and "value" in value:
289
- # Handle structured entities
290
- entities.append(value["value"])
291
- elif hasattr(value, "value"):
292
- # Handle Pydantic model entities
293
- entities.append(value.value)
294
- else:
295
- # Convert any other type to string
296
- entities.append(str(value))
297
-
298
- if entities:
299
- line += f" | Entities: {', '.join(entities[:5])}"
300
-
301
- context_lines.append(line)
302
-
303
- return "\n".join(context_lines)
304
-
305
- async def update_short_term_memories(
306
- self,
307
- db_manager,
308
- analysis: EssentialMemoriesAnalysis,
309
- namespace: str = "default",
310
- ) -> int:
142
+ async def check_for_context_updates(
143
+ self, db_manager, namespace: str = "default"
144
+ ) -> bool:
311
145
  """
312
- Update short-term memory with selected essential conversations
146
+ Check for new conscious-info memories and copy them to short-term memory
313
147
 
314
148
  Args:
315
149
  db_manager: Database manager instance
316
- analysis: Analysis containing selected essential memories
317
150
  namespace: Memory namespace
318
151
 
319
152
  Returns:
320
- Number of conversations copied to short-term memory
153
+ True if new memories were copied, False otherwise
321
154
  """
322
155
  try:
323
- updated_count = 0
156
+ # Get unprocessed conscious memories
157
+ new_memories = await self._get_unprocessed_conscious_memories(
158
+ db_manager, namespace
159
+ )
324
160
 
325
- # Clear existing essential conversations from short-term memory
326
- await self._clear_essential_conversations(db_manager, namespace)
161
+ if not new_memories:
162
+ return False
327
163
 
328
- # Copy each essential conversation to short-term memory
329
- for essential_memory in analysis.essential_memories:
330
- success = await self._copy_conversation_to_short_term(
331
- db_manager, essential_memory, namespace
164
+ # Copy each new memory directly to short-term memory
165
+ copied_count = 0
166
+ for memory_row in new_memories:
167
+ success = await self._copy_memory_to_short_term(
168
+ db_manager, namespace, memory_row
332
169
  )
333
170
  if success:
334
- updated_count += 1
171
+ copied_count += 1
172
+
173
+ # Mark new memories as processed
174
+ memory_ids = [row[0] for row in new_memories] # memory_id is first column
175
+ await self._mark_memories_processed(db_manager, memory_ids, namespace)
335
176
 
336
177
  logger.info(
337
- f"ConsciouscAgent: Copied {updated_count} essential conversations to short-term memory"
178
+ f"ConsciouscAgent: Copied {copied_count} new conscious-info memories to short-term memory"
338
179
  )
339
- return updated_count
180
+ return copied_count > 0
340
181
 
341
182
  except Exception as e:
342
- logger.error(f"ConsciouscAgent: Failed to update short-term memories: {e}")
343
- return 0
183
+ logger.error(f"ConsciouscAgent: Context update failed: {e}")
184
+ return False
344
185
 
345
- async def _clear_essential_conversations(self, db_manager, namespace: str):
346
- """Clear existing essential conversations from short-term memory"""
186
+ async def _get_conscious_memories(self, db_manager, namespace: str) -> List[tuple]:
187
+ """Get all conscious-info labeled memories from long-term memory"""
347
188
  try:
348
- with db_manager._get_connection() as connection:
349
- # Delete conversations marked as essential
350
- query = """
351
- DELETE FROM short_term_memory
352
- WHERE namespace = ? AND category_primary LIKE 'essential_%'
353
- """
189
+ from sqlalchemy import text
354
190
 
355
- connection.execute(query, (namespace,))
356
- connection.commit()
191
+ with db_manager._get_connection() as connection:
192
+ cursor = connection.execute(
193
+ text(
194
+ """SELECT memory_id, processed_data, summary, searchable_content,
195
+ importance_score, created_at
196
+ FROM long_term_memory
197
+ WHERE namespace = :namespace AND classification = 'conscious-info'
198
+ ORDER BY importance_score DESC, created_at DESC"""
199
+ ),
200
+ {"namespace": namespace},
201
+ )
202
+ return cursor.fetchall()
357
203
 
358
204
  except Exception as e:
359
- logger.error(
360
- f"ConsciouscAgent: Failed to clear essential conversations: {e}"
361
- )
205
+ logger.error(f"ConsciouscAgent: Failed to get conscious memories: {e}")
206
+ return []
362
207
 
363
- async def _copy_conversation_to_short_term(
364
- self, db_manager, essential_memory: EssentialMemory, namespace: str
365
- ) -> bool:
366
- """Copy an essential conversation from long-term to short-term memory"""
208
+ async def _get_unprocessed_conscious_memories(
209
+ self, db_manager, namespace: str
210
+ ) -> List[tuple]:
211
+ """Get unprocessed conscious-info labeled memories from long-term memory"""
367
212
  try:
368
- # First, get the original conversation from long-term memory
369
- original_memory = await self._get_original_memory(
370
- db_manager, essential_memory.memory_id
371
- )
213
+ from sqlalchemy import text
372
214
 
373
- if not original_memory:
374
- logger.warning(
375
- f"ConsciouscAgent: Could not find original memory {essential_memory.memory_id}"
215
+ with db_manager._get_connection() as connection:
216
+ cursor = connection.execute(
217
+ text(
218
+ """SELECT memory_id, processed_data, summary, searchable_content,
219
+ importance_score, created_at
220
+ FROM long_term_memory
221
+ WHERE namespace = :namespace AND classification = 'conscious-info'
222
+ AND conscious_processed = :conscious_processed
223
+ ORDER BY importance_score DESC, created_at DESC"""
224
+ ),
225
+ {"namespace": namespace, "conscious_processed": False},
376
226
  )
377
- return False
227
+ return cursor.fetchall()
378
228
 
379
- # Create new memory ID for short-term storage
380
- new_memory_id = str(uuid.uuid4())
381
- now = datetime.now()
229
+ except Exception as e:
230
+ logger.error(f"ConsciouscAgent: Failed to get unprocessed memories: {e}")
231
+ return []
382
232
 
383
- # Create enhanced processed data
384
- try:
385
- original_processed_data = json.loads(
386
- original_memory.get("processed_data", "{}")
387
- )
388
- except json.JSONDecodeError:
389
- original_processed_data = {}
390
-
391
- enhanced_processed_data = original_processed_data.copy()
392
- enhanced_processed_data.update(
393
- {
394
- "promoted_by": "conscious_agent",
395
- "promoted_at": now.isoformat(),
396
- "original_memory_id": essential_memory.memory_id,
397
- "frequency_score": essential_memory.frequency_score,
398
- "recency_score": essential_memory.recency_score,
399
- "promotion_reasoning": essential_memory.relevance_reasoning,
400
- }
401
- )
233
+ async def _copy_memory_to_short_term(
234
+ self, db_manager, namespace: str, memory_row: tuple
235
+ ) -> bool:
236
+ """Copy a conscious memory directly to short-term memory with duplicate filtering"""
237
+ try:
238
+ (
239
+ memory_id,
240
+ processed_data,
241
+ summary,
242
+ searchable_content,
243
+ importance_score,
244
+ _,
245
+ ) = memory_row
246
+
247
+ from sqlalchemy import text
402
248
 
403
- # Store in short-term memory
404
249
  with db_manager._get_connection() as connection:
405
- query = """
406
- INSERT INTO short_term_memory (
407
- memory_id, chat_id, processed_data, importance_score,
408
- category_primary, retention_type, namespace, created_at,
409
- expires_at, searchable_content, summary
410
- ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
411
- """
250
+ # Check if similar content already exists in short-term memory
251
+ existing_check = connection.execute(
252
+ text(
253
+ """SELECT COUNT(*) FROM short_term_memory
254
+ WHERE namespace = :namespace
255
+ AND category_primary = 'conscious_context'
256
+ AND (searchable_content = :searchable_content
257
+ OR summary = :summary)"""
258
+ ),
259
+ {
260
+ "namespace": namespace,
261
+ "searchable_content": searchable_content,
262
+ "summary": summary,
263
+ },
264
+ )
412
265
 
413
- # Essential conversations expire after 30 days (refreshed by re-analysis)
414
- expires_at = now + timedelta(days=30)
266
+ existing_count = existing_check.scalar()
267
+ if existing_count > 0:
268
+ logger.debug(
269
+ f"ConsciouscAgent: Skipping duplicate memory {memory_id} - similar content already exists in short-term memory"
270
+ )
271
+ return False
272
+
273
+ # Create short-term memory ID
274
+ short_term_id = (
275
+ f"conscious_{memory_id}_{int(datetime.now().timestamp())}"
276
+ )
415
277
 
278
+ # Insert directly into short-term memory with conscious_context category
416
279
  connection.execute(
417
- query,
418
- (
419
- new_memory_id,
420
- original_memory.get(
421
- "original_chat_id"
422
- ), # Preserve original chat_id link
423
- json.dumps(enhanced_processed_data),
424
- essential_memory.importance_score,
425
- f"essential_{original_memory.get('category_primary', 'conversation')}", # Mark as essential
426
- "short_term",
427
- namespace,
428
- now.isoformat(),
429
- expires_at.isoformat(),
430
- original_memory.get(
431
- "searchable_content", essential_memory.summary
432
- ),
433
- essential_memory.summary,
280
+ text(
281
+ """INSERT INTO short_term_memory (
282
+ memory_id, processed_data, importance_score, category_primary,
283
+ retention_type, namespace, created_at, expires_at,
284
+ searchable_content, summary, is_permanent_context
285
+ ) VALUES (:memory_id, :processed_data, :importance_score, :category_primary,
286
+ :retention_type, :namespace, :created_at, :expires_at,
287
+ :searchable_content, :summary, :is_permanent_context)"""
434
288
  ),
289
+ {
290
+ "memory_id": short_term_id,
291
+ "processed_data": (
292
+ json.dumps(processed_data)
293
+ if isinstance(processed_data, dict)
294
+ else processed_data
295
+ ),
296
+ "importance_score": importance_score,
297
+ "category_primary": "conscious_context", # Use conscious_context category
298
+ "retention_type": "permanent",
299
+ "namespace": namespace,
300
+ "created_at": datetime.now().isoformat(),
301
+ "expires_at": None, # No expiration (permanent)
302
+ "searchable_content": searchable_content, # Copy exact searchable_content
303
+ "summary": summary, # Copy exact summary
304
+ "is_permanent_context": True, # is_permanent_context = True
305
+ },
435
306
  )
436
-
437
307
  connection.commit()
438
- return True
308
+
309
+ logger.debug(
310
+ f"ConsciouscAgent: Copied memory {memory_id} to short-term as {short_term_id}"
311
+ )
312
+ return True
439
313
 
440
314
  except Exception as e:
441
315
  logger.error(
442
- f"ConsciouscAgent: Failed to copy conversation to short-term: {e}"
316
+ f"ConsciouscAgent: Failed to copy memory {memory_row[0]} to short-term: {e}"
443
317
  )
444
318
  return False
445
319
 
446
- async def _get_original_memory(self, db_manager, memory_id: str) -> Optional[Dict]:
447
- """Get original memory from long-term storage"""
320
+ async def _mark_memories_processed(
321
+ self, db_manager, memory_ids: List[str], namespace: str
322
+ ):
323
+ """Mark memories as processed for conscious context"""
448
324
  try:
449
- with db_manager._get_connection() as connection:
450
- query = """
451
- SELECT memory_id, original_chat_id, processed_data, importance_score,
452
- category_primary, searchable_content, summary
453
- FROM long_term_memory
454
- WHERE memory_id = ?
455
- """
456
-
457
- cursor = connection.execute(query, (memory_id,))
458
- row = cursor.fetchone()
459
-
460
- if row:
461
- return {
462
- "memory_id": row[0],
463
- "original_chat_id": row[1],
464
- "processed_data": row[2],
465
- "importance_score": row[3],
466
- "category_primary": row[4],
467
- "searchable_content": row[5],
468
- "summary": row[6],
469
- }
470
- return None
471
-
472
- except Exception as e:
473
- logger.error(f"ConsciouscAgent: Failed to get original memory: {e}")
474
- return None
325
+ from sqlalchemy import text
475
326
 
476
- def should_run_analysis(self) -> bool:
477
- """Check if it's time to run memory analysis"""
478
- if self.last_analysis is None:
479
- return True
480
-
481
- return datetime.now() - self.last_analysis >= self.analysis_interval
482
-
483
- async def run_background_analysis(self, db_manager, namespace: str = "default"):
484
- """Run the complete background analysis workflow"""
485
- try:
486
- if not self.should_run_analysis():
487
- return
488
-
489
- logger.info("ConsciouscAgent: Starting background memory analysis")
490
-
491
- # Analyze memory patterns
492
- analysis = await self.analyze_memory_patterns(db_manager, namespace)
493
-
494
- if analysis:
495
- # Update short-term memory with selected conversations
496
- await self.update_short_term_memories(db_manager, analysis, namespace)
497
- logger.info(
498
- "ConsciouscAgent: Background analysis completed successfully"
499
- )
500
- else:
501
- logger.info(
502
- "ConsciouscAgent: No analysis performed (insufficient data)"
503
- )
327
+ with db_manager._get_connection() as connection:
328
+ for memory_id in memory_ids:
329
+ connection.execute(
330
+ text(
331
+ """UPDATE long_term_memory
332
+ SET conscious_processed = :conscious_processed
333
+ WHERE memory_id = :memory_id AND namespace = :namespace"""
334
+ ),
335
+ {
336
+ "memory_id": memory_id,
337
+ "namespace": namespace,
338
+ "conscious_processed": True,
339
+ },
340
+ )
341
+ connection.commit()
504
342
 
505
343
  except Exception as e:
506
- logger.error(f"ConsciouscAgent: Background analysis failed: {e}")
344
+ logger.error(f"ConsciouscAgent: Failed to mark memories processed: {e}")