memorisdk 1.0.2__py3-none-any.whl → 2.0.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of memorisdk might be problematic.
- memori/__init__.py +24 -8
- memori/agents/conscious_agent.py +252 -414
- memori/agents/memory_agent.py +487 -224
- memori/agents/retrieval_agent.py +491 -68
- memori/config/memory_manager.py +323 -0
- memori/core/conversation.py +393 -0
- memori/core/database.py +386 -371
- memori/core/memory.py +1683 -532
- memori/core/providers.py +217 -0
- memori/database/adapters/__init__.py +10 -0
- memori/database/adapters/mysql_adapter.py +331 -0
- memori/database/adapters/postgresql_adapter.py +291 -0
- memori/database/adapters/sqlite_adapter.py +229 -0
- memori/database/auto_creator.py +320 -0
- memori/database/connection_utils.py +207 -0
- memori/database/connectors/base_connector.py +283 -0
- memori/database/connectors/mysql_connector.py +240 -18
- memori/database/connectors/postgres_connector.py +277 -4
- memori/database/connectors/sqlite_connector.py +178 -3
- memori/database/models.py +400 -0
- memori/database/queries/base_queries.py +1 -1
- memori/database/queries/memory_queries.py +91 -2
- memori/database/query_translator.py +222 -0
- memori/database/schema_generators/__init__.py +7 -0
- memori/database/schema_generators/mysql_schema_generator.py +215 -0
- memori/database/search/__init__.py +8 -0
- memori/database/search/mysql_search_adapter.py +255 -0
- memori/database/search/sqlite_search_adapter.py +180 -0
- memori/database/search_service.py +700 -0
- memori/database/sqlalchemy_manager.py +888 -0
- memori/integrations/__init__.py +36 -11
- memori/integrations/litellm_integration.py +340 -6
- memori/integrations/openai_integration.py +506 -240
- memori/tools/memory_tool.py +94 -4
- memori/utils/input_validator.py +395 -0
- memori/utils/pydantic_models.py +138 -36
- memori/utils/query_builder.py +530 -0
- memori/utils/security_audit.py +594 -0
- memori/utils/security_integration.py +339 -0
- memori/utils/transaction_manager.py +547 -0
- {memorisdk-1.0.2.dist-info → memorisdk-2.0.1.dist-info}/METADATA +56 -23
- memorisdk-2.0.1.dist-info/RECORD +66 -0
- memori/scripts/llm_text.py +0 -50
- memorisdk-1.0.2.dist-info/RECORD +0 -44
- memorisdk-1.0.2.dist-info/entry_points.txt +0 -2
- {memorisdk-1.0.2.dist-info → memorisdk-2.0.1.dist-info}/WHEEL +0 -0
- {memorisdk-1.0.2.dist-info → memorisdk-2.0.1.dist-info}/licenses/LICENSE +0 -0
- {memorisdk-1.0.2.dist-info → memorisdk-2.0.1.dist-info}/top_level.txt +0 -0
memori/agents/conscious_agent.py
CHANGED
@@ -1,506 +1,344 @@

The removed 1.0.2 implementation (shown only in fragments in this view) built on openai.AsyncOpenAI and an OPENAI_API_KEY: it defined the Pydantic models EssentialMemory and EssentialMemoriesAnalysis and prompted an LLM to pick "essential" conversations to promote, through analyze_memory_patterns, _get_long_term_memories, _perform_memory_selection, _prepare_memory_context, update_short_term_memories, and _get_original_memory. The 2.0.1 rewrite drops the OpenAI client and the Pydantic models: ConsciouscAgent now copies memories classified 'conscious-info' from long_term_memory straight into short_term_memory through SQLAlchemy, with duplicate filtering, permanent retention, and a conscious_processed flag. The added side of the hunk is the new file:

"""
Conscious Agent for User Context Management

This agent copies conscious-info labeled memories from long-term memory
directly to short-term memory for immediate context availability.
"""

import json
from datetime import datetime
from typing import List

from loguru import logger


class ConsciouscAgent:
    """
    Agent that copies conscious-info labeled memories from long-term memory
    directly to short-term memory for immediate context availability.

    Runs once at program startup when conscious_ingest=True.
    """

    def __init__(self):
        """Initialize the conscious agent"""
        self.context_initialized = False

    async def run_conscious_ingest(
        self, db_manager, namespace: str = "default"
    ) -> bool:
        """
        Run conscious context ingestion once at program startup

        Copies all conscious-info labeled memories from long-term memory
        directly to short-term memory as permanent context

        Args:
            db_manager: Database manager instance
            namespace: Memory namespace

        Returns:
            True if memories were copied, False otherwise
        """
        try:
            # Get all conscious-info labeled memories
            conscious_memories = await self._get_conscious_memories(
                db_manager, namespace
            )

            if not conscious_memories:
                logger.info("ConsciouscAgent: No conscious-info memories found")
                return False

            # Copy each conscious-info memory directly to short-term memory
            copied_count = 0
            for memory_row in conscious_memories:
                success = await self._copy_memory_to_short_term(
                    db_manager, namespace, memory_row
                )
                if success:
                    copied_count += 1

            # Mark memories as processed
            memory_ids = [
                row[0] for row in conscious_memories
            ]  # memory_id is first column
            await self._mark_memories_processed(db_manager, memory_ids, namespace)

            self.context_initialized = True
            logger.info(
                f"ConsciouscAgent: Copied {copied_count} conscious-info memories to short-term memory"
            )

            return copied_count > 0

        except Exception as e:
            logger.error(f"ConsciouscAgent: Conscious ingest failed: {e}")
            return False

    async def initialize_existing_conscious_memories(
        self, db_manager, namespace: str = "default"
    ) -> bool:
        """
        Initialize by copying ALL existing conscious-info memories to short-term memory
        This is called when both auto_ingest=True and conscious_ingest=True
        to ensure essential conscious information is immediately available

        Args:
            db_manager: Database manager instance
            namespace: Memory namespace

        Returns:
            True if memories were processed, False otherwise
        """
        try:
            from sqlalchemy import text

            with db_manager._get_connection() as connection:
                # Get ALL conscious-info labeled memories from long-term memory
                cursor = connection.execute(
                    text(
                        """SELECT memory_id, processed_data, summary, searchable_content,
                                  importance_score, created_at
                           FROM long_term_memory
                           WHERE namespace = :namespace AND classification = 'conscious-info'
                           ORDER BY importance_score DESC, created_at DESC"""
                    ),
                    {"namespace": namespace},
                )
                existing_conscious_memories = cursor.fetchall()

            if not existing_conscious_memories:
                logger.debug(
                    "ConsciouscAgent: No existing conscious-info memories found for initialization"
                )
                return False

            copied_count = 0
            for memory_row in existing_conscious_memories:
                success = await self._copy_memory_to_short_term(
                    db_manager, namespace, memory_row
                )
                if success:
                    copied_count += 1

            if copied_count > 0:
                logger.info(
                    f"ConsciouscAgent: Initialized {copied_count} existing conscious-info memories to short-term memory"
                )
                return True
            else:
                logger.debug(
                    "ConsciouscAgent: No new conscious memories to initialize (all were duplicates)"
                )
                return False

        except Exception as e:
            logger.error(
                f"ConsciouscAgent: Failed to initialize existing conscious memories: {e}"
            )
            return False

    async def check_for_context_updates(
        self, db_manager, namespace: str = "default"
    ) -> bool:
        """
        Check for new conscious-info memories and copy them to short-term memory

        Args:
            db_manager: Database manager instance
            namespace: Memory namespace

        Returns:
            True if new memories were copied, False otherwise
        """
        try:
            # Get unprocessed conscious memories
            new_memories = await self._get_unprocessed_conscious_memories(
                db_manager, namespace
            )

            if not new_memories:
                return False

            # Copy each new memory directly to short-term memory
            copied_count = 0
            for memory_row in new_memories:
                success = await self._copy_memory_to_short_term(
                    db_manager, namespace, memory_row
                )
                if success:
                    copied_count += 1

            # Mark new memories as processed
            memory_ids = [row[0] for row in new_memories]  # memory_id is first column
            await self._mark_memories_processed(db_manager, memory_ids, namespace)

            logger.info(
                f"ConsciouscAgent: Copied {copied_count} new conscious-info memories to short-term memory"
            )
            return copied_count > 0

        except Exception as e:
            logger.error(f"ConsciouscAgent: Context update failed: {e}")
            return False

    async def _get_conscious_memories(self, db_manager, namespace: str) -> List[tuple]:
        """Get all conscious-info labeled memories from long-term memory"""
        try:
            from sqlalchemy import text

            with db_manager._get_connection() as connection:
                cursor = connection.execute(
                    text(
                        """SELECT memory_id, processed_data, summary, searchable_content,
                                  importance_score, created_at
                           FROM long_term_memory
                           WHERE namespace = :namespace AND classification = 'conscious-info'
                           ORDER BY importance_score DESC, created_at DESC"""
                    ),
                    {"namespace": namespace},
                )
                return cursor.fetchall()

        except Exception as e:
            logger.error(f"ConsciouscAgent: Failed to get conscious memories: {e}")
            return []

    async def _get_unprocessed_conscious_memories(
        self, db_manager, namespace: str
    ) -> List[tuple]:
        """Get unprocessed conscious-info labeled memories from long-term memory"""
        try:
            from sqlalchemy import text

            with db_manager._get_connection() as connection:
                cursor = connection.execute(
                    text(
                        """SELECT memory_id, processed_data, summary, searchable_content,
                                  importance_score, created_at
                           FROM long_term_memory
                           WHERE namespace = :namespace AND classification = 'conscious-info'
                           AND conscious_processed = :conscious_processed
                           ORDER BY importance_score DESC, created_at DESC"""
                    ),
                    {"namespace": namespace, "conscious_processed": False},
                )
                return cursor.fetchall()

        except Exception as e:
            logger.error(f"ConsciouscAgent: Failed to get unprocessed memories: {e}")
            return []

    async def _copy_memory_to_short_term(
        self, db_manager, namespace: str, memory_row: tuple
    ) -> bool:
        """Copy a conscious memory directly to short-term memory with duplicate filtering"""
        try:
            (
                memory_id,
                processed_data,
                summary,
                searchable_content,
                importance_score,
                _,
            ) = memory_row

            from sqlalchemy import text

            with db_manager._get_connection() as connection:
                # Check if similar content already exists in short-term memory
                existing_check = connection.execute(
                    text(
                        """SELECT COUNT(*) FROM short_term_memory
                           WHERE namespace = :namespace
                           AND category_primary = 'conscious_context'
                           AND (searchable_content = :searchable_content
                                OR summary = :summary)"""
                    ),
                    {
                        "namespace": namespace,
                        "searchable_content": searchable_content,
                        "summary": summary,
                    },
                )

                existing_count = existing_check.scalar()
                if existing_count > 0:
                    logger.debug(
                        f"ConsciouscAgent: Skipping duplicate memory {memory_id} - similar content already exists in short-term memory"
                    )
                    return False

                # Create short-term memory ID
                short_term_id = (
                    f"conscious_{memory_id}_{int(datetime.now().timestamp())}"
                )

                # Insert directly into short-term memory with conscious_context category
                connection.execute(
                    text(
                        """INSERT INTO short_term_memory (
                            memory_id, processed_data, importance_score, category_primary,
                            retention_type, namespace, created_at, expires_at,
                            searchable_content, summary, is_permanent_context
                        ) VALUES (:memory_id, :processed_data, :importance_score, :category_primary,
                                  :retention_type, :namespace, :created_at, :expires_at,
                                  :searchable_content, :summary, :is_permanent_context)"""
                    ),
                    {
                        "memory_id": short_term_id,
                        "processed_data": (
                            json.dumps(processed_data)
                            if isinstance(processed_data, dict)
                            else processed_data
                        ),
                        "importance_score": importance_score,
                        "category_primary": "conscious_context",  # Use conscious_context category
                        "retention_type": "permanent",
                        "namespace": namespace,
                        "created_at": datetime.now().isoformat(),
                        "expires_at": None,  # No expiration (permanent)
                        "searchable_content": searchable_content,  # Copy exact searchable_content
                        "summary": summary,  # Copy exact summary
                        "is_permanent_context": True,  # is_permanent_context = True
                    },
                )
                connection.commit()

            logger.debug(
                f"ConsciouscAgent: Copied memory {memory_id} to short-term as {short_term_id}"
            )
            return True

        except Exception as e:
            logger.error(
                f"ConsciouscAgent: Failed to copy memory {memory_row[0]} to short-term: {e}"
            )
            return False

    async def _mark_memories_processed(
        self, db_manager, memory_ids: List[str], namespace: str
    ):
        """Mark memories as processed for conscious context"""
        try:
            from sqlalchemy import text

            with db_manager._get_connection() as connection:
                for memory_id in memory_ids:
                    connection.execute(
                        text(
                            """UPDATE long_term_memory
                               SET conscious_processed = :conscious_processed
                               WHERE memory_id = :memory_id AND namespace = :namespace"""
                        ),
                        {
                            "memory_id": memory_id,
                            "namespace": namespace,
                            "conscious_processed": True,
                        },
                    )
                connection.commit()

        except Exception as e:
            logger.error(f"ConsciouscAgent: Failed to mark memories processed: {e}")