ambivo-agents 1.0.1__py3-none-any.whl
- ambivo_agents/__init__.py +91 -0
- ambivo_agents/agents/__init__.py +21 -0
- ambivo_agents/agents/assistant.py +203 -0
- ambivo_agents/agents/code_executor.py +133 -0
- ambivo_agents/agents/code_executor2.py +222 -0
- ambivo_agents/agents/knowledge_base.py +935 -0
- ambivo_agents/agents/media_editor.py +992 -0
- ambivo_agents/agents/moderator.py +617 -0
- ambivo_agents/agents/simple_web_search.py +404 -0
- ambivo_agents/agents/web_scraper.py +1027 -0
- ambivo_agents/agents/web_search.py +933 -0
- ambivo_agents/agents/youtube_download.py +784 -0
- ambivo_agents/cli.py +699 -0
- ambivo_agents/config/__init__.py +4 -0
- ambivo_agents/config/loader.py +301 -0
- ambivo_agents/core/__init__.py +33 -0
- ambivo_agents/core/base.py +1024 -0
- ambivo_agents/core/history.py +606 -0
- ambivo_agents/core/llm.py +333 -0
- ambivo_agents/core/memory.py +640 -0
- ambivo_agents/executors/__init__.py +8 -0
- ambivo_agents/executors/docker_executor.py +108 -0
- ambivo_agents/executors/media_executor.py +237 -0
- ambivo_agents/executors/youtube_executor.py +404 -0
- ambivo_agents/services/__init__.py +6 -0
- ambivo_agents/services/agent_service.py +605 -0
- ambivo_agents/services/factory.py +370 -0
- ambivo_agents-1.0.1.dist-info/METADATA +1090 -0
- ambivo_agents-1.0.1.dist-info/RECORD +33 -0
- ambivo_agents-1.0.1.dist-info/WHEEL +5 -0
- ambivo_agents-1.0.1.dist-info/entry_points.txt +3 -0
- ambivo_agents-1.0.1.dist-info/licenses/LICENSE +21 -0
- ambivo_agents-1.0.1.dist-info/top_level.txt +1 -0
@@ -0,0 +1,640 @@
# ambivo_agents/core/memory.py
"""
Memory management system for ambivo_agents.
FIXED: Redis key consistency issue for session history retrieval
"""

import json
import gzip
import time
import logging
import threading
from abc import ABC, abstractmethod
from dataclasses import dataclass
from datetime import datetime
from typing import Dict, List, Any, Optional, Union
from collections import OrderedDict
import hashlib
import base64

from ..config.loader import load_config, get_config_section

# External dependencies with fallbacks
try:
    import redis
    from cachetools import TTLCache
    import lz4.frame

    REDIS_AVAILABLE = True
    COMPRESSION_AVAILABLE = True
except ImportError:
    REDIS_AVAILABLE = False
    COMPRESSION_AVAILABLE = False

@dataclass
class MemoryStats:
    """Memory usage and performance statistics"""
    total_operations: int = 0
    cache_hits: int = 0
    cache_misses: int = 0
    compression_savings_bytes: int = 0
    avg_response_time_ms: float = 0.0
    redis_memory_usage_bytes: int = 0
    local_cache_size: int = 0
    error_count: int = 0

    @property
    def cache_hit_rate(self) -> float:
        """Calculate cache hit rate percentage"""
        total = self.cache_hits + self.cache_misses
        return (self.cache_hits / total * 100) if total > 0 else 0.0

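# Illustration of the hit-rate formula above: 75 hits out of 100 total
# lookups yields 75.0.
#
#   stats = MemoryStats(cache_hits=75, cache_misses=25)
#   stats.cache_hit_rate  # -> 75.0
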
class MemoryManagerInterface(ABC):
    """Abstract interface for memory management"""

    @abstractmethod
    def store_message(self, message):
        """Store a message in memory"""
        pass

    @abstractmethod
    def get_recent_messages(self, limit: int = 10, conversation_id: Optional[str] = None):
        """Retrieve recent messages from memory"""
        pass

    @abstractmethod
    def store_context(self, key: str, value: Any, conversation_id: Optional[str] = None):
        """Store contextual information"""
        pass

    @abstractmethod
    def get_context(self, key: str, conversation_id: Optional[str] = None):
        """Retrieve contextual information"""
        pass

    @abstractmethod
    def clear_memory(self, conversation_id: Optional[str] = None):
        """Clear memory"""
        pass

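# A minimal in-memory implementation of this interface (hypothetical, not
# shipped in the package) is enough to exercise agents in tests without Redis:
#
#   class DictMemoryManager(MemoryManagerInterface):
#       def __init__(self):
#           self.messages, self.context = [], {}
#       def store_message(self, message):
#           self.messages.append(message)
#       def get_recent_messages(self, limit=10, conversation_id=None):
#           return self.messages[-limit:]
#       def store_context(self, key, value, conversation_id=None):
#           self.context[key] = value
#       def get_context(self, key, conversation_id=None):
#           return self.context.get(key)
#       def clear_memory(self, conversation_id=None):
#           self.messages.clear()
#           self.context.clear()
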
class CompressionManager:
    """Handles data compression with safe UTF-8 handling"""

    def __init__(self, enabled: bool = True, algorithm: str = 'lz4', compression_level: int = 1):
        self.enabled = enabled
        self.algorithm = algorithm
        self.compression_level = compression_level
        self.min_size_bytes = 100
        self.stats = {'compressed_count': 0, 'decompressed_count': 0, 'bytes_saved': 0}

    def compress(self, data: str) -> str:
        """Compress string data with safe UTF-8 handling"""
        if not self.enabled or len(data) < self.min_size_bytes or not COMPRESSION_AVAILABLE:
            return data

        try:
            if isinstance(data, str):
                data_bytes = data.encode('utf-8', errors='replace')
            else:
                data_bytes = str(data).encode('utf-8', errors='replace')

            if self.algorithm == 'gzip':
                compressed = gzip.compress(data_bytes, compresslevel=self.compression_level)
            elif self.algorithm == 'lz4':
                compressed = lz4.frame.compress(data_bytes, compression_level=self.compression_level)
            else:
                return data

            original_size = len(data_bytes)
            compressed_size = len(compressed)

            if compressed_size < original_size:
                self.stats['bytes_saved'] += (original_size - compressed_size)
                self.stats['compressed_count'] += 1

                compressed_b64 = base64.b64encode(compressed).decode('ascii')
                return f'COMPRESSED:{self.algorithm}:{compressed_b64}'

            return data

        except Exception as e:
            logging.error(f"Compression failed: {e}")
            return data

    def decompress(self, data: str) -> str:
        """Decompress data with safe UTF-8 handling"""
        if not isinstance(data, str) or not data.startswith('COMPRESSED:'):
            return str(data)

        try:
            parts = data.split(':', 2)
            if len(parts) == 3:
                algorithm = parts[1]
                compressed_b64 = parts[2]

                compressed_data = base64.b64decode(compressed_b64.encode('ascii'))

                if algorithm == 'gzip':
                    decompressed = gzip.decompress(compressed_data).decode('utf-8', errors='replace')
                elif algorithm == 'lz4':
                    decompressed = lz4.frame.decompress(compressed_data).decode('utf-8', errors='replace')
                else:
                    decompressed = compressed_data.decode('utf-8', errors='replace')

                self.stats['decompressed_count'] += 1
                return decompressed

            return data

        except Exception as e:
            logging.error(f"Decompression failed: {e}")
            return str(data)

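# Round-trip sketch for CompressionManager (assumes lz4 is installed;
# inputs shorter than min_size_bytes pass through untouched):
#
#   cm = CompressionManager(algorithm='lz4')
#   blob = cm.compress("x" * 500)   # -> "COMPRESSED:lz4:<base64 payload>"
#   cm.decompress(blob)             # -> the original 500-char string
#   cm.compress("short")            # -> "short" (below size threshold)
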
class IntelligentCache:
    """Intelligent caching with safe encoding"""

    def __init__(self, enabled: bool = True, max_size: int = 1000, ttl_seconds: int = 300):
        self.enabled = enabled
        self.max_size = max_size
        self.ttl_seconds = ttl_seconds
        # TTLCache comes from the guarded cachetools import above, so this
        # class can only be constructed when cachetools is installed.
        self.cache: TTLCache = TTLCache(maxsize=max_size, ttl=ttl_seconds)
        self.stats = {'hits': 0, 'misses': 0, 'evictions': 0}
        self._lock = threading.RLock()

    def _safe_key(self, key: str) -> str:
        """Ensure key is safe for caching"""
        if isinstance(key, bytes):
            return key.decode('utf-8', errors='replace')
        return str(key)

    def get(self, key: str) -> Optional[Any]:
        """Get item from cache with safe key handling"""
        if not self.enabled:
            return None

        with self._lock:
            try:
                safe_key = self._safe_key(key)
                value = self.cache[safe_key]
                self.stats['hits'] += 1
                return value
            except KeyError:
                self.stats['misses'] += 1
                return None
            except Exception as e:
                logging.error(f"Cache get error: {e}")
                self.stats['misses'] += 1
                return None

    def set(self, key: str, value: Any) -> None:
        """Set item in cache with safe key handling"""
        if not self.enabled:
            return

        with self._lock:
            try:
                safe_key = self._safe_key(key)
                if len(self.cache) >= self.max_size:
                    self.stats['evictions'] += 1
                self.cache[safe_key] = value
            except Exception as e:
                logging.error(f"Cache set error: {e}")

    def delete(self, key: str) -> bool:
        """Delete item from cache"""
        if not self.enabled:
            return False

        with self._lock:
            try:
                safe_key = self._safe_key(key)
                del self.cache[safe_key]
                return True
            except KeyError:
                return False
            except Exception as e:
                logging.error(f"Cache delete error: {e}")
                return False

    def clear(self) -> None:
        """Clear all cache entries"""
        with self._lock:
            self.cache.clear()

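# Usage sketch for IntelligentCache: entries expire after ttl_seconds and
# the underlying TTLCache evicts the oldest once max_size is reached:
#
#   cache = IntelligentCache(max_size=2, ttl_seconds=60)
#   cache.set("a", 1)
#   cache.set("b", 2)
#   cache.get("a")        # -> 1 (hit)
#   cache.get("missing")  # -> None (miss)
#   cache.stats           # -> {'hits': 1, 'misses': 1, 'evictions': 0}
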
class RedisMemoryManager(MemoryManagerInterface):
    """Redis memory manager with session-based keys and UTF-8 handling - FIXED KEY CONSISTENCY"""

    def __init__(self, agent_id: str, redis_config: Dict[str, Any] = None):
        self.agent_id = agent_id

        # Load configuration from YAML
        config = load_config()
        memory_config = config.get('memory_management', {})

        # Get Redis config from YAML if not provided
        if redis_config is None:
            redis_config = get_config_section('redis', config)

        self.redis_config = redis_config.copy()

        # Ensure safe Redis configuration
        self.redis_config.update({
            'decode_responses': True,
            'encoding': 'utf-8',
            'encoding_errors': 'replace',
            'socket_timeout': 10,
            'socket_connect_timeout': 10,
            'retry_on_timeout': True
        })

        # Initialize components from config
        compression_config = memory_config.get('compression', {})
        self.compression_manager = CompressionManager(
            enabled=compression_config.get('enabled', True),
            algorithm=compression_config.get('algorithm', 'lz4'),
            compression_level=compression_config.get('compression_level', 1)
        )

        cache_config = memory_config.get('cache', {})
        self.cache = IntelligentCache(
            enabled=cache_config.get('enabled', True),
            max_size=cache_config.get('max_size', 1000),
            ttl_seconds=cache_config.get('ttl_seconds', 300)
        )

        # Statistics
        self.stats = MemoryStats()

        # Initialize Redis connection
        if not REDIS_AVAILABLE:
            raise ImportError("Redis package is required but not installed")

        try:
            self.redis_client = redis.Redis(**self.redis_config)
            self.redis_client.ping()
            self.available = True
        except Exception as e:
            raise ConnectionError(f"Failed to connect to Redis: {e}")

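    # Construction sketch (host/port values are illustrative; note that
    # load_config() runs even when redis_config is passed explicitly, so a
    # YAML config must still be resolvable):
    #
    #   manager = RedisMemoryManager(
    #       agent_id="agent-42",
    #       redis_config={"host": "localhost", "port": 6379, "db": 0},
    #   )
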
    def _get_primary_identifier(self, session_id: str = None, conversation_id: str = None) -> str:
        """
        FIXED: Get consistent primary identifier for key generation
        Priority: conversation_id > session_id > agent_id
        """
        if conversation_id:
            return conversation_id
        elif session_id:
            return session_id
        else:
            return self.agent_id

    def _get_message_key(self, session_id: str = None, conversation_id: str = None) -> str:
        """
        FIXED: Generate consistent message key
        Uses the same primary identifier logic for both storage and retrieval
        """
        primary_id = self._get_primary_identifier(session_id, conversation_id)

        # Always use session: prefix for session/conversation IDs
        if primary_id != self.agent_id:
            return f"session:{primary_id}:messages"
        else:
            # Fallback to agent-based key only when no session info
            return f"agent:{primary_id}:messages"

    def _get_context_key(self, session_id: str = None, conversation_id: str = None) -> str:
        """
        FIXED: Generate consistent context key
        Uses the same primary identifier logic
        """
        primary_id = self._get_primary_identifier(session_id, conversation_id)

        if primary_id != self.agent_id:
            return f"session:{primary_id}:context"
        else:
            return f"agent:{primary_id}:context"

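    # Key resolution examples for an instance with agent_id="agent-42"
    # (identifiers are illustrative):
    #
    #   _get_message_key(conversation_id="conv-9")  # "session:conv-9:messages"
    #   _get_message_key(session_id="sess-1")       # "session:sess-1:messages"
    #   _get_message_key()                          # "agent:agent-42:messages"
    #   _get_context_key(conversation_id="conv-9")  # "session:conv-9:context"
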
    def _safe_serialize(self, obj: Any) -> str:
        """Safely serialize object to JSON with UTF-8 handling"""
        try:
            json_str = json.dumps(obj, ensure_ascii=True, default=str)
            return self.compression_manager.compress(json_str)
        except Exception as e:
            logging.error(f"Serialization error: {e}")
            return json.dumps({"error": "serialization_failed", "original_type": str(type(obj))})

    def _safe_deserialize(self, data: str) -> Any:
        """Safely deserialize JSON data"""
        try:
            if isinstance(data, bytes):
                data = data.decode('utf-8', errors='replace')

            decompressed_data = self.compression_manager.decompress(str(data))
            return json.loads(decompressed_data)
        except Exception as e:
            logging.error(f"Deserialization error: {e}")
            return {"error": "deserialization_failed", "data": str(data)[:100]}

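    # The two helpers are inverses: json.dumps then compress on the way in,
    # decompress then json.loads on the way out. For example:
    #
    #   payload = self._safe_serialize({"role": "user", "content": "hi"})
    #   self._safe_deserialize(payload)  # -> {'role': 'user', 'content': 'hi'}
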
    def store_message(self, message):
        """FIXED: Store message with consistent key generation"""
        try:
            # Extract session/conversation info from message
            session_id = getattr(message, 'session_id', None)
            conversation_id = getattr(message, 'conversation_id', None)

            # FIXED: Use consistent key generation
            key = self._get_message_key(session_id, conversation_id)

            message_data = self._safe_serialize(message.to_dict() if hasattr(message, 'to_dict') else message)

            self.redis_client.lpush(key, message_data)
            self.redis_client.expire(key, 30 * 24 * 3600)  # 30 days TTL

            # Cache the latest message
            self.cache.set(f"recent_msg:{key}", message_data)
            self.stats.total_operations += 1

            # Enhanced debug logging
            logging.debug(f"STORED message - Key: {key}")
            logging.debug(f"  session_id: {session_id}, conversation_id: {conversation_id}")
            logging.debug(f"  primary_id: {self._get_primary_identifier(session_id, conversation_id)}")

        except Exception as e:
            logging.error(f"Error storing message: {e}")
            self.stats.error_count += 1

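    # store_message only assumes the message exposes optional session_id /
    # conversation_id attributes and, ideally, a to_dict() method. A
    # hypothetical minimal message (the package's real message class is not
    # shown here):
    #
    #   @dataclass
    #   class Msg:
    #       content: str
    #       session_id: str = None
    #       conversation_id: str = None
    #       def to_dict(self):
    #           return {"content": self.content,
    #                   "session_id": self.session_id,
    #                   "conversation_id": self.conversation_id}
    #
    #   manager.store_message(Msg("hello", conversation_id="conv-9"))
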
    def get_recent_messages(self, limit: int = 10, conversation_id: Optional[str] = None):
        """FIXED: Get recent messages with proper message ordering and error handling"""
        try:
            # Generate key using fixed logic. Passing conversation_id in both
            # parameter slots resolves to the same primary identifier, since
            # conversation_id takes priority, so this matches store_message's key.
            key = self._get_message_key(conversation_id, conversation_id)

            # Enhanced debug logging
            logging.debug(f"RETRIEVING MESSAGES from key: {key}")
            logging.debug(f"  conversation_id: {conversation_id}, limit: {limit}")

            # Check if key exists
            key_exists = self.redis_client.exists(key)
            if not key_exists:
                logging.debug(f"  Key {key} does not exist")
                return []

            total_messages = self.redis_client.llen(key)
            logging.debug(f"  Total messages in Redis: {total_messages}")

            # Skip cache and go directly to Redis
            message_data_list = self.redis_client.lrange(key, 0, limit - 1)
            logging.debug(f"  Retrieved {len(message_data_list)} raw items from Redis")

            messages = []

            # Process all messages (don't reverse yet)
            for i, message_data in enumerate(message_data_list):
                try:
                    logging.debug(f"  Processing message {i + 1}: {str(message_data)[:50]}...")

                    # Deserialize message
                    data = self._safe_deserialize(message_data)

                    if isinstance(data, dict) and 'content' in data:
                        logging.debug(
                            f"  ✅ Valid message: {data.get('message_type')} - {data.get('content')[:30]}...")
                        messages.append(data)
                    else:
                        logging.warning(f"  ⚠️ Invalid message format: {type(data)}")

                except Exception as e:
                    logging.error(f"  ❌ Error parsing message {i + 1}: {e}")
                    continue

            # CRITICAL FIX: Reverse to get chronological order (oldest first)
            # LPUSH stores newest first, so we need to reverse for proper conversation flow
            messages.reverse()

            logging.debug(f"  ✅ Returning {len(messages)} messages in chronological order")

            self.stats.total_operations += 1
            return messages

        except Exception as e:
            logging.error(f"Error retrieving messages: {e}")
            self.stats.error_count += 1
            return []

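    # Why the reverse() above: LPUSH prepends, so LRANGE returns newest
    # first. A quick redis-py sketch (assumes a local server; the key name
    # is illustrative):
    #
    #   r = redis.Redis(decode_responses=True)
    #   r.lpush("demo:messages", "first", "second", "third")
    #   r.lrange("demo:messages", 0, -1)  # -> ['third', 'second', 'first']
    #   # reversing restores chronological order: first, second, third
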
    def store_context(self, key: str, value: Any, conversation_id: Optional[str] = None):
        """FIXED: Store context with consistent keys"""
        try:
            redis_key = self._get_context_key(conversation_id, conversation_id)

            value_json = self._safe_serialize(value)

            self.redis_client.hset(redis_key, key, value_json)
            self.redis_client.expire(redis_key, 30 * 24 * 3600)  # 30 days TTL

            self.cache.set(f"ctx:{redis_key}:{key}", value)
            self.stats.total_operations += 1

        except Exception as e:
            logging.error(f"Error storing context: {e}")
            self.stats.error_count += 1

    def get_context(self, key: str, conversation_id: Optional[str] = None):
        """FIXED: Get context with consistent keys"""
        try:
            redis_key = self._get_context_key(conversation_id, conversation_id)

            cache_key = f"ctx:{redis_key}:{key}"
            cached_value = self.cache.get(cache_key)
            if cached_value is not None:
                self.stats.cache_hits += 1
                return cached_value

            self.stats.cache_misses += 1

            value_str = self.redis_client.hget(redis_key, key)
            if value_str:
                value = self._safe_deserialize(value_str)
                self.cache.set(cache_key, value)
                self.stats.total_operations += 1
                return value

            return None

        except Exception as e:
            logging.error(f"Error retrieving context: {e}")
            self.stats.error_count += 1
            return None

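    # Context round-trip sketch: values live in one Redis hash per session,
    # and store_context also primes the local cache, so the next read is
    # typically served without a Redis round trip:
    #
    #   manager.store_context("topic", "billing", conversation_id="conv-9")
    #   manager.get_context("topic", conversation_id="conv-9")  # -> "billing"
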
    def clear_memory(self, conversation_id: Optional[str] = None):
        """FIXED: Clear memory with consistent keys"""
        try:
            if conversation_id:
                # Clear specific session
                message_key = self._get_message_key(conversation_id, conversation_id)
                context_key = self._get_context_key(conversation_id, conversation_id)
                deleted_count = self.redis_client.delete(message_key, context_key)

                logging.debug(f"Cleared memory for conversation {conversation_id}: {deleted_count} keys deleted")
            else:
                # Clear all agent keys. Note: the session:* pattern matches
                # every session in the database, not just this agent's.
                agent_pattern = f"agent:{self.agent_id}:*"
                session_pattern = f"session:*"

                agent_keys = self.redis_client.keys(agent_pattern)
                session_keys = self.redis_client.keys(session_pattern)

                all_keys = agent_keys + session_keys
                if all_keys:
                    deleted_count = self.redis_client.delete(*all_keys)
                else:
                    deleted_count = 0

                logging.debug(f"Cleared all memory: {deleted_count} keys deleted")

            self.cache.clear()
            self.stats.total_operations += 1

        except Exception as e:
            logging.error(f"Error clearing memory: {e}")
            self.stats.error_count += 1

    def get_stats(self) -> MemoryStats:
        """Get memory usage statistics"""
        try:
            info = self.redis_client.info('memory')
            self.stats.redis_memory_usage_bytes = info.get('used_memory', 0)
            self.stats.local_cache_size = len(self.cache.cache)
            self.stats.cache_hits += self.cache.stats['hits']
            self.stats.cache_misses += self.cache.stats['misses']
            self.stats.compression_savings_bytes = self.compression_manager.stats['bytes_saved']
        except Exception as e:
            logging.error(f"Error getting stats: {e}")

        return self.stats

    def debug_session_keys(self, session_id: str = None, conversation_id: str = None) -> Dict[str, Any]:
        """
        NEW: Debug method to inspect Redis keys for a session
        Useful for troubleshooting key consistency issues
        """
        try:
            keys_to_check = []

            # Check all possible key combinations
            if conversation_id:
                keys_to_check.extend([
                    f"session:{conversation_id}:messages",
                    f"session:{conversation_id}:context"
                ])

            if session_id and session_id != conversation_id:
                keys_to_check.extend([
                    f"session:{session_id}:messages",
                    f"session:{session_id}:context"
                ])

            # Agent fallback keys
            keys_to_check.extend([
                f"agent:{self.agent_id}:messages",
                f"agent:{self.agent_id}:context"
            ])

            result = {
                'session_id': session_id,
                'conversation_id': conversation_id,
                'agent_id': self.agent_id,
                'primary_identifier': self._get_primary_identifier(session_id, conversation_id),
                'message_key': self._get_message_key(session_id, conversation_id),
                'context_key': self._get_context_key(session_id, conversation_id),
                'keys_checked': len(keys_to_check),
                'key_status': {}
            }

            for key in keys_to_check:
                exists = self.redis_client.exists(key)
                if exists:
                    key_type = self.redis_client.type(key)
                    if key_type == 'list':
                        length = self.redis_client.llen(key)
                        result['key_status'][key] = {
                            'exists': True,
                            'type': key_type,
                            'length': length
                        }
                    elif key_type == 'hash':
                        length = self.redis_client.hlen(key)
                        result['key_status'][key] = {
                            'exists': True,
                            'type': key_type,
                            'length': length
                        }
                    else:
                        result['key_status'][key] = {
                            'exists': True,
                            'type': key_type
                        }
                else:
                    result['key_status'][key] = {'exists': False}

            return result

        except Exception as e:
            return {'error': str(e)}

    def debug_keys(self, pattern: str = "*") -> Dict[str, Any]:
        """Enhanced debug method to inspect Redis keys"""
        try:
            keys = self.redis_client.keys(pattern)
            result = {
                'total_keys': len(keys),
                'keys': []
            }

            for key in keys[:20]:  # Limit to first 20 keys
                key_str = key.decode() if isinstance(key, bytes) else str(key)
                key_type = self.redis_client.type(key_str)

                if key_type == 'list':
                    length = self.redis_client.llen(key_str)
                    result['keys'].append({
                        'key': key_str,
                        'type': key_type,
                        'length': length
                    })
                elif key_type == 'hash':
                    length = self.redis_client.hlen(key_str)
                    result['keys'].append({
                        'key': key_str,
                        'type': key_type,
                        'length': length
                    })
                else:
                    result['keys'].append({
                        'key': key_str,
                        'type': key_type
                    })

            return result

        except Exception as e:
            logging.error(f"Error debugging keys: {e}")
            return {'error': str(e)}

def create_redis_memory_manager(agent_id: str, redis_config: Dict[str, Any] = None):
    """
    Create Redis memory manager with configuration from YAML.

    Args:
        agent_id: Unique identifier for the agent
        redis_config: Optional Redis configuration. If None, loads from YAML.

    Returns:
        RedisMemoryManager instance
    """
    return RedisMemoryManager(agent_id, redis_config)
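# Usage sketch for the factory (assumes a reachable Redis and a YAML config
# with a `redis` section, per the loader above):
#
#   manager = create_redis_memory_manager("agent-42")
#   manager.store_context("user_name", "Ada", conversation_id="conv-1")
#   manager.get_context("user_name", conversation_id="conv-1")  # -> "Ada"
#   manager.debug_session_keys(conversation_id="conv-1")
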
@@ -0,0 +1,8 @@
# ambivo_agents/executors/__init__.py
from .docker_executor import DockerCodeExecutor
from .media_executor import MediaDockerExecutor
from .youtube_executor import YouTubeDockerExecutor

__all__ = ["DockerCodeExecutor", "MediaDockerExecutor", "YouTubeDockerExecutor"]