agmem 0.2.0__py3-none-any.whl → 0.3.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
memvcs/core/agents.py ADDED
@@ -0,0 +1,411 @@
1
+ """
2
+ Memory Agents - Automated memory management tasks.
3
+
4
+ This module provides:
5
+ - Automated memory consolidation
6
+ - Cleanup and archival agents
7
+ - Pattern detection and alerts
8
+ - Proactive memory maintenance
9
+ """
10
+
11
+ import hashlib
12
+ import json
13
+ from dataclasses import dataclass, field
14
+ from datetime import datetime, timedelta, timezone
15
+ from pathlib import Path
16
+ from typing import Any, Callable, Dict, List, Optional, Set
17
+
18
+
19
@dataclass
class AgentTask:
    """A unit of work queued for a memory agent.

    Attributes mirror the JSON shape produced by ``to_dict``.
    """

    task_id: str
    task_type: str  # one of: "consolidate", "cleanup", "archive", "alert"
    target: str  # path or pattern the task operates on
    priority: int = 1  # 1 = low ... 5 = high
    scheduled_at: Optional[str] = None
    completed_at: Optional[str] = None
    result: Optional[Dict[str, Any]] = None

    def to_dict(self) -> Dict[str, Any]:
        """Serialize this task to a plain, JSON-friendly dict."""
        keys = (
            "task_id",
            "task_type",
            "target",
            "priority",
            "scheduled_at",
            "completed_at",
            "result",
        )
        return {key: getattr(self, key) for key in keys}
41
+
42
+
43
@dataclass
class AgentRule:
    """A declarative trigger: when *condition* crosses *threshold*, do *action*."""

    rule_id: str
    name: str
    condition: str  # kind of condition being monitored
    threshold: Any  # value at which the rule fires
    action: str  # action taken when the rule fires
    enabled: bool = True

    def to_dict(self) -> Dict[str, Any]:
        """Serialize this rule to a plain, JSON-friendly dict."""
        keys = ("rule_id", "name", "condition", "threshold", "action", "enabled")
        return {key: getattr(self, key) for key in keys}
63
+
64
+
65
class ConsolidationAgent:
    """Agent that consolidates fragmented memories into single documents."""

    def __init__(self, repo_root: Path):
        self.repo_root = Path(repo_root)

    def find_consolidation_candidates(
        self, min_similarity: float = 0.7, max_age_days: int = 30
    ) -> List[Dict[str, Any]]:
        """Find groups of related memories that could be merged.

        Markdown files are bucketed by ``<parent dir>/<first hyphen-separated
        word of the stem>``; any bucket with three or more files is reported.

        Args:
            min_similarity: Reserved for future similarity-based grouping;
                currently unused.
            max_age_days: Reserved for future age filtering; currently unused.

        Returns:
            Up to 20 candidate groups, each with the shared prefix, total file
            count, up to five example relative paths, and a suggestion string.
        """
        from memvcs.core.repository import Repository

        candidates: List[Dict[str, Any]] = []
        try:
            repo = Repository(self.repo_root)
            current_dir = repo.current_dir

            # Bucket markdown files by directory + leading word of the stem.
            files_by_prefix: Dict[str, List[Path]] = {}
            for filepath in current_dir.rglob("*.md"):
                if filepath.is_file():
                    prefix = filepath.parent.name + "/" + filepath.stem.split("-")[0]
                    files_by_prefix.setdefault(prefix, []).append(filepath)

            # A prefix shared by 3+ files suggests fragmentation.
            for prefix, files in files_by_prefix.items():
                if len(files) >= 3:
                    candidates.append(
                        {
                            "prefix": prefix,
                            "file_count": len(files),
                            "files": [str(f.relative_to(current_dir)) for f in files[:5]],
                            "suggestion": f"Consider consolidating {len(files)} related files",
                        }
                    )
        except Exception:
            # Best-effort scan: an unreadable repository simply yields no
            # candidates rather than raising.
            pass

        return candidates[:20]

    def consolidate(self, file_paths: List[str], output_path: str) -> Dict[str, Any]:
        """Merge several memory files into one consolidated document.

        Args:
            file_paths: Paths (relative to the repo's current dir) to merge;
                missing files are silently skipped but still counted in
                ``source_count``.
            output_path: Relative path for the consolidated output file.

        Returns:
            ``{"success": True, "output_path": ..., "source_count": ...}`` on
            success, or ``{"success": False, "error": ...}`` on any failure.
        """
        from memvcs.core.repository import Repository

        try:
            repo = Repository(self.repo_root)
            current_dir = repo.current_dir

            combined_content = []
            combined_content.append("# Consolidated Memory\n")
            combined_content.append(f"Created: {datetime.now(timezone.utc).isoformat()}\n")
            combined_content.append(f"Sources: {len(file_paths)} files\n\n")

            for path in file_paths:
                full_path = current_dir / path
                if full_path.exists():
                    # Read/write explicitly as UTF-8 so behavior does not
                    # depend on the platform's locale encoding.
                    content = full_path.read_text(encoding="utf-8")
                    combined_content.append(f"## From: {path}\n\n")
                    combined_content.append(content)
                    combined_content.append("\n\n---\n\n")

            # Write the consolidated file, creating parents as needed.
            output_full = current_dir / output_path
            output_full.parent.mkdir(parents=True, exist_ok=True)
            output_full.write_text("\n".join(combined_content), encoding="utf-8")

            return {
                "success": True,
                "output_path": output_path,
                "source_count": len(file_paths),
            }
        except Exception as e:
            return {"success": False, "error": str(e)}
141
+
142
+
143
class CleanupAgent:
    """Agent that identifies and cleans up old/unused memories."""

    def __init__(self, repo_root: Path):
        self.repo_root = Path(repo_root)

    def find_cleanup_candidates(
        self, max_age_days: int = 90, min_size_bytes: int = 0
    ) -> List[Dict[str, Any]]:
        """Find memories that are candidates for cleanup.

        Args:
            max_age_days: Only files last modified more than this many days
                ago are reported.
            min_size_bytes: Only files at least this many bytes are reported
                (0, the default, reports files of any size).

        Returns:
            Up to 50 entries (oldest first), each with relative path, size,
            ISO last-modified timestamp, and age in days.
        """
        from memvcs.core.repository import Repository

        candidates: List[Dict[str, Any]] = []
        now = datetime.now(timezone.utc)
        cutoff = now - timedelta(days=max_age_days)

        try:
            repo = Repository(self.repo_root)
            current_dir = repo.current_dir

            for filepath in current_dir.rglob("*"):
                if not filepath.is_file():
                    continue

                stat = filepath.stat()
                mtime = datetime.fromtimestamp(stat.st_mtime, tz=timezone.utc)

                # Apply both the age cutoff and the size floor (the size
                # parameter was previously accepted but never used).
                if mtime < cutoff and stat.st_size >= min_size_bytes:
                    candidates.append(
                        {
                            "path": str(filepath.relative_to(current_dir)),
                            "size_bytes": stat.st_size,
                            "last_modified": mtime.isoformat(),
                            "age_days": (now - mtime).days,
                        }
                    )
        except Exception:
            # Best-effort scan: an unreadable repository yields no candidates.
            pass

        # Oldest files first.
        candidates.sort(key=lambda x: x["age_days"], reverse=True)
        return candidates[:50]

    def find_duplicates(self) -> List[Dict[str, Any]]:
        """Group files whose contents are byte-identical.

        Returns:
            One entry per duplicate group, with a truncated SHA-256 content
            hash, the relative paths, and the file count. Empty list if the
            repository cannot be read.
        """
        from memvcs.core.repository import Repository

        try:
            repo = Repository(self.repo_root)
            current_dir = repo.current_dir

            hash_to_files: Dict[str, List[str]] = {}

            for filepath in current_dir.rglob("*"):
                if filepath.is_file():
                    try:
                        content_hash = hashlib.sha256(filepath.read_bytes()).hexdigest()[:16]
                        rel_path = str(filepath.relative_to(current_dir))
                        hash_to_files.setdefault(content_hash, []).append(rel_path)
                    except Exception:
                        # Skip files that cannot be read (permissions, races).
                        pass

            return [
                {"hash": hash_val, "files": files, "count": len(files)}
                for hash_val, files in hash_to_files.items()
                if len(files) > 1
            ]
        except Exception:
            return []

    def archive_old_memories(
        self, paths: List[str], archive_dir: str = "archive"
    ) -> Dict[str, Any]:
        """Move old memories into *archive_dir*, preserving relative paths.

        Args:
            paths: Relative paths to move; missing files are skipped.
            archive_dir: Directory (relative to the repo's current dir) that
                receives the archived files.

        Returns:
            ``{"success": True, "archived_count": ..., "archived_paths": ...}``
            on success, or ``{"success": False, "error": ...}`` on failure.
        """
        from memvcs.core.repository import Repository

        try:
            repo = Repository(self.repo_root)
            current_dir = repo.current_dir
            archive_path = current_dir / archive_dir

            archived = []
            for path in paths:
                source = current_dir / path
                if source.exists():
                    dest = archive_path / path
                    dest.parent.mkdir(parents=True, exist_ok=True)
                    source.rename(dest)
                    archived.append(path)

            return {
                "success": True,
                "archived_count": len(archived),
                "archived_paths": archived,
            }
        except Exception as e:
            return {"success": False, "error": str(e)}
251
+
252
+
253
class AlertAgent:
    """Agent that monitors memory patterns and persists alerts in ``.mem/alerts.json``."""

    def __init__(self, repo_root: Path):
        self.repo_root = Path(repo_root)
        self.mem_dir = self.repo_root / ".mem"
        self.alerts_file = self.mem_dir / "alerts.json"
        self._alerts: List[Dict[str, Any]] = []
        self._load()

    def _load(self) -> None:
        """Load persisted alerts; a missing or corrupt file leaves the list empty."""
        if self.alerts_file.exists():
            try:
                data = json.loads(self.alerts_file.read_text(encoding="utf-8"))
                self._alerts = data.get("alerts", [])
            except Exception:
                # Corrupt alerts file: start fresh rather than crash.
                pass

    def _save(self) -> None:
        """Persist all alerts to disk, creating ``.mem`` if needed."""
        self.mem_dir.mkdir(parents=True, exist_ok=True)
        self.alerts_file.write_text(
            json.dumps({"alerts": self._alerts}, indent=2), encoding="utf-8"
        )

    def add_alert(
        self,
        alert_type: str,
        message: str,
        severity: str = "info",
        data: Optional[Dict[str, Any]] = None,
    ) -> Dict[str, Any]:
        """Record and persist a new alert.

        Args:
            alert_type: Category of the alert (e.g. "storage").
            message: Human-readable description.
            severity: Severity label ("info", "warning", ...).
            data: Optional structured payload stored with the alert.

        Returns:
            The stored alert dict, including its generated 8-char id.
        """
        # Use an aware UTC timestamp in the id hash, consistent with the
        # created_at field (previously the id mixed in a naive local time).
        alert = {
            "id": hashlib.sha256(
                f"{alert_type}{message}{datetime.now(timezone.utc).isoformat()}".encode()
            ).hexdigest()[:8],
            "type": alert_type,
            "message": message,
            "severity": severity,
            "created_at": datetime.now(timezone.utc).isoformat(),
            "acknowledged": False,
            "data": data or {},
        }
        self._alerts.append(alert)
        self._save()
        return alert

    def get_alerts(
        self, unacknowledged_only: bool = False, limit: int = 50
    ) -> List[Dict[str, Any]]:
        """Return the most recent *limit* alerts, optionally unacknowledged only."""
        alerts = self._alerts
        if unacknowledged_only:
            alerts = [a for a in alerts if not a.get("acknowledged")]
        return alerts[-limit:]

    def acknowledge_alert(self, alert_id: str) -> bool:
        """Mark the alert with *alert_id* acknowledged; return False if unknown."""
        for alert in self._alerts:
            if alert["id"] == alert_id:
                alert["acknowledged"] = True
                alert["acknowledged_at"] = datetime.now(timezone.utc).isoformat()
                self._save()
                return True
        return False

    def check_storage_usage(self, threshold_mb: float = 100) -> Optional[Dict[str, Any]]:
        """Raise a "storage" alert when total memory size exceeds *threshold_mb*.

        Returns:
            The newly added alert dict if the threshold is exceeded, else
            None (also None when the repository cannot be read).
        """
        from memvcs.core.repository import Repository

        try:
            repo = Repository(self.repo_root)
            current_dir = repo.current_dir

            total_size = sum(
                fp.stat().st_size for fp in current_dir.rglob("*") if fp.is_file()
            )

            size_mb = total_size / (1024 * 1024)
            if size_mb > threshold_mb:
                return self.add_alert(
                    alert_type="storage",
                    message=f"Storage usage ({size_mb:.1f}MB) exceeds threshold ({threshold_mb}MB)",
                    severity="warning",
                    data={"current_mb": size_mb, "threshold_mb": threshold_mb},
                )
        except Exception:
            # Best-effort check: an inaccessible repository means no alert.
            pass
        return None
343
+
344
+
345
class MemoryAgentManager:
    """Facade that owns and coordinates all memory agents."""

    def __init__(self, repo_root: Path):
        self.repo_root = Path(repo_root)
        self.consolidation = ConsolidationAgent(repo_root)
        self.cleanup = CleanupAgent(repo_root)
        self.alert = AlertAgent(repo_root)
        self.rules: List[AgentRule] = []
        self.task_queue: List[AgentTask] = []

    def run_health_check(self) -> Dict[str, Any]:
        """Run a comprehensive health check across all agents.

        Returns:
            A dict with a UTC timestamp, per-agent "checks" summaries, and
            the ten most recent unacknowledged alerts.
        """
        stamp = datetime.now(timezone.utc).isoformat()
        checks: Dict[str, Any] = {}

        # Consolidation: groups of fragmented, related files.
        grouped = self.consolidation.find_consolidation_candidates()
        checks["consolidation"] = {
            "candidate_count": len(grouped),
            "candidates": grouped[:5],
        }

        # Cleanup: files untouched for two months or more.
        stale = self.cleanup.find_cleanup_candidates(max_age_days=60)
        checks["cleanup"] = {
            "candidate_count": len(stale),
            "candidates": stale[:5],
        }

        # Duplicates: byte-identical file groups.
        dupes = self.cleanup.find_duplicates()
        checks["duplicates"] = {
            "duplicate_groups": len(dupes),
            "duplicates": dupes[:5],
        }

        # Storage: may add a new alert as a side effect.
        checks["storage"] = {
            "alert": self.alert.check_storage_usage(threshold_mb=50) is not None,
        }

        return {
            "timestamp": stamp,
            "checks": checks,
            "alerts": self.alert.get_alerts(unacknowledged_only=True, limit=10),
        }
395
+
396
+
397
+ # --- Dashboard Helper ---
398
+
399
+
400
def get_agent_dashboard(repo_root: Path) -> Dict[str, Any]:
    """Collect summary figures for the memory-agent dashboard.

    Runs a full health check and flattens its headline counts next to the
    complete health-check payload.
    """
    health = MemoryAgentManager(repo_root).run_health_check()
    checks = health["checks"]
    return {
        "health_check": health,
        "consolidation_candidates": checks["consolidation"]["candidate_count"],
        "cleanup_candidates": checks["cleanup"]["candidate_count"],
        "duplicate_groups": checks["duplicates"]["duplicate_groups"],
        "unacknowledged_alerts": len(health.get("alerts", [])),
    }