agmem 0.2.0-py3-none-any.whl → 0.3.0-py3-none-any.whl
This diff shows the content of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
- {agmem-0.2.0.dist-info → agmem-0.3.0.dist-info}/METADATA +338 -26
- {agmem-0.2.0.dist-info → agmem-0.3.0.dist-info}/RECORD +32 -16
- memvcs/__init__.py +1 -1
- memvcs/cli.py +1 -1
- memvcs/coordinator/server.py +18 -2
- memvcs/core/agents.py +411 -0
- memvcs/core/archaeology.py +410 -0
- memvcs/core/collaboration.py +435 -0
- memvcs/core/compliance.py +427 -0
- memvcs/core/compression_metrics.py +248 -0
- memvcs/core/confidence.py +379 -0
- memvcs/core/daemon.py +735 -0
- memvcs/core/delta.py +45 -23
- memvcs/core/distiller.py +3 -12
- memvcs/core/fast_similarity.py +404 -0
- memvcs/core/federated.py +13 -2
- memvcs/core/gardener.py +8 -68
- memvcs/core/pack.py +1 -1
- memvcs/core/privacy_validator.py +187 -0
- memvcs/core/private_search.py +327 -0
- memvcs/core/protocol_builder.py +198 -0
- memvcs/core/search_index.py +538 -0
- memvcs/core/semantic_graph.py +388 -0
- memvcs/core/session.py +520 -0
- memvcs/core/timetravel.py +430 -0
- memvcs/integrations/mcp_server.py +775 -4
- memvcs/integrations/web_ui/server.py +424 -0
- memvcs/integrations/web_ui/websocket.py +223 -0
- {agmem-0.2.0.dist-info → agmem-0.3.0.dist-info}/WHEEL +0 -0
- {agmem-0.2.0.dist-info → agmem-0.3.0.dist-info}/entry_points.txt +0 -0
- {agmem-0.2.0.dist-info → agmem-0.3.0.dist-info}/licenses/LICENSE +0 -0
- {agmem-0.2.0.dist-info → agmem-0.3.0.dist-info}/top_level.txt +0 -0
memvcs/core/timetravel.py (new file)
@@ -0,0 +1,430 @@
"""
Time-Travel Debugging - Navigate memory history with temporal expressions.

This module provides:
- Time expression parsing (relative dates, ranges)
- Temporal checkout (view memory at any point in time)
- Knowledge snapshots with export
- Timeline navigation
"""

import json
import re
from dataclasses import dataclass
from datetime import datetime, timedelta, timezone
from pathlib import Path
from typing import Any, Dict, Generator, List, Optional, Tuple, Union


@dataclass
class TimeExpression:
    """Parsed time expression."""

    expression: str
    resolved_time: datetime
    is_relative: bool
    is_range: bool
    range_end: Optional[datetime] = None

    def to_dict(self) -> Dict[str, Any]:
        return {
            "expression": self.expression,
            "resolved_time": self.resolved_time.isoformat(),
            "is_relative": self.is_relative,
            "is_range": self.is_range,
            "range_end": self.range_end.isoformat() if self.range_end else None,
        }


class TimeExpressionParser:
    """Parses natural language time expressions."""

    RELATIVE_PATTERNS = [
        (r"(\d+)\s*(?:minutes?|min)\s*ago", lambda m: timedelta(minutes=int(m.group(1)))),
        (r"(\d+)\s*(?:hours?|hr)\s*ago", lambda m: timedelta(hours=int(m.group(1)))),
        (r"(\d+)\s*(?:days?)\s*ago", lambda m: timedelta(days=int(m.group(1)))),
        (r"(\d+)\s*(?:weeks?)\s*ago", lambda m: timedelta(weeks=int(m.group(1)))),
        (r"(\d+)\s*(?:months?)\s*ago", lambda m: timedelta(days=int(m.group(1)) * 30)),
        (r"(\d+)\s*(?:years?)\s*ago", lambda m: timedelta(days=int(m.group(1)) * 365)),
        (r"yesterday", lambda m: timedelta(days=1)),
        (r"last\s*week", lambda m: timedelta(weeks=1)),
        (r"last\s*month", lambda m: timedelta(days=30)),
        (r"today", lambda m: timedelta(days=0)),
        (r"now", lambda m: timedelta(seconds=0)),
    ]

    RANGE_PATTERNS = [
        # "between X and Y"
        (r"between\s+(.+?)\s+and\s+(.+)", 2),
        # "from X to Y"
        (r"from\s+(.+?)\s+to\s+(.+)", 2),
        # "last N days/weeks"
        (r"last\s+(\d+)\s+days?", lambda n: (int(n), "days")),
        (r"last\s+(\d+)\s+weeks?", lambda n: (int(n), "weeks")),
    ]

    def parse(self, expression: str) -> TimeExpression:
        """Parse a time expression into a TimeExpression object."""
        now = datetime.now(timezone.utc)
        expr_lower = expression.lower().strip()

        # Check for relative patterns
        for pattern, delta_fn in self.RELATIVE_PATTERNS:
            match = re.match(pattern, expr_lower)
            if match:
                delta = delta_fn(match)
                return TimeExpression(
                    expression=expression,
                    resolved_time=now - delta,
                    is_relative=True,
                    is_range=False,
                )

        # Check for range patterns
        if match := re.match(r"last\s+(\d+)\s+(days?|weeks?|months?)", expr_lower):
            count = int(match.group(1))
            unit = match.group(2).rstrip("s")
            if unit == "day":
                delta = timedelta(days=count)
            elif unit == "week":
                delta = timedelta(weeks=count)
            else:
                delta = timedelta(days=count * 30)

            return TimeExpression(
                expression=expression,
                resolved_time=now - delta,
                is_relative=True,
                is_range=True,
                range_end=now,
            )

        # Try ISO format
        for fmt in ["%Y-%m-%d", "%Y-%m-%dT%H:%M:%S", "%Y-%m-%d %H:%M:%S"]:
            try:
                dt = datetime.strptime(expr_lower, fmt).replace(tzinfo=timezone.utc)
                return TimeExpression(
                    expression=expression,
                    resolved_time=dt,
                    is_relative=False,
                    is_range=False,
                )
            except ValueError:
                continue

        # Default to now if unparseable
        return TimeExpression(
            expression=expression,
            resolved_time=now,
            is_relative=False,
            is_range=False,
        )

    def parse_range(self, start_expr: str, end_expr: str) -> Tuple[datetime, datetime]:
        """Parse a time range."""
        start = self.parse(start_expr).resolved_time
        end = self.parse(end_expr).resolved_time
        if start > end:
            start, end = end, start
        return start, end


@dataclass
class TemporalSnapshot:
    """A snapshot of memory state at a point in time."""

    timestamp: str
    commit_hash: str
    files: Dict[str, str]  # path -> content
    metadata: Dict[str, Any]

    def to_dict(self) -> Dict[str, Any]:
        return {
            "timestamp": self.timestamp,
            "commit_hash": self.commit_hash,
            "file_count": len(self.files),
            "files": list(self.files.keys()),
            "metadata": self.metadata,
        }


class TemporalNavigator:
    """Navigate memory history with time-based queries."""

    def __init__(self, repo_root: Path):
        self.repo_root = Path(repo_root)
        self.parser = TimeExpressionParser()

    def find_commit_at(self, time_expr: str) -> Optional[Dict[str, Any]]:
        """Find the commit closest to a given time expression."""
        from memvcs.core.repository import Repository

        parsed = self.parser.parse(time_expr)
        target_time = parsed.resolved_time

        try:
            repo = Repository(self.repo_root)
            commits = repo.get_log(max_count=500)

            best_commit = None
            best_delta = None

            for commit in commits:
                ts = commit.get("timestamp", "")
                if ts:
                    try:
                        commit_time = datetime.fromisoformat(ts.replace("Z", "+00:00"))
                        delta = abs((commit_time - target_time).total_seconds())

                        # Only consider commits before or at the target time
                        if commit_time <= target_time:
                            if best_delta is None or delta < best_delta:
                                best_delta = delta
                                best_commit = commit
                    except Exception:
                        pass

            return best_commit
        except Exception:
            return None

    def find_commits_in_range(self, start_expr: str, end_expr: str) -> List[Dict[str, Any]]:
        """Find all commits within a time range."""
        from memvcs.core.repository import Repository

        start_time, end_time = self.parser.parse_range(start_expr, end_expr)

        try:
            repo = Repository(self.repo_root)
            commits = repo.get_log(max_count=500)

            matching = []
            for commit in commits:
                ts = commit.get("timestamp", "")
                if ts:
                    try:
                        commit_time = datetime.fromisoformat(ts.replace("Z", "+00:00"))
                        if start_time <= commit_time <= end_time:
                            matching.append(commit)
                    except Exception:
                        pass

            return matching
        except Exception:
            return []

    def get_file_at_time(self, file_path: str, time_expr: str) -> Optional[str]:
        """Get file content at a specific point in time."""
        commit = self.find_commit_at(time_expr)
        if not commit:
            return None

        from memvcs.core.repository import Repository

        try:
            repo = Repository(self.repo_root)
            # Get file content from that commit
            # This is a simplified version - full implementation would
            # use object store to reconstruct file from tree
            return repo.get_file_content(file_path, commit["short_hash"])
        except Exception:
            return None

    def create_snapshot(self, time_expr: str) -> Optional[TemporalSnapshot]:
        """Create a snapshot of memory at a given time."""
        commit = self.find_commit_at(time_expr)
        if not commit:
            return None

        from memvcs.core.repository import Repository

        try:
            repo = Repository(self.repo_root)
            files = {}

            # Get all files from this commit
            for filepath in repo.current_dir.rglob("*"):
                if filepath.is_file():
                    try:
                        rel_path = str(filepath.relative_to(repo.current_dir))
                        content = repo.get_file_content(rel_path, commit["short_hash"])
                        if content:
                            files[rel_path] = content
                    except Exception:
                        pass

            return TemporalSnapshot(
                timestamp=commit.get("timestamp", ""),
                commit_hash=commit.get("short_hash", ""),
                files=files,
                metadata={
                    "message": commit.get("message", ""),
                    "author": commit.get("author", ""),
                },
            )
        except Exception:
            return None


class TimelineVisualizer:
    """Generates timeline data for visualization."""

    def __init__(self, repo_root: Path):
        self.repo_root = Path(repo_root)

    def get_activity_timeline(
        self, days: int = 30, granularity: str = "day"
    ) -> List[Dict[str, Any]]:
        """Get activity timeline data."""
        from memvcs.core.repository import Repository

        try:
            repo = Repository(self.repo_root)
            commits = repo.get_log(max_count=1000)

            now = datetime.now(timezone.utc)
            cutoff = now - timedelta(days=days)

            # Group by granularity
            groups: Dict[str, List[Dict[str, Any]]] = {}
            for commit in commits:
                ts = commit.get("timestamp", "")
                if ts:
                    try:
                        commit_time = datetime.fromisoformat(ts.replace("Z", "+00:00"))
                        if commit_time >= cutoff:
                            if granularity == "hour":
                                key = commit_time.strftime("%Y-%m-%d %H:00")
                            elif granularity == "day":
                                key = commit_time.strftime("%Y-%m-%d")
                            else:
                                key = commit_time.strftime("%Y-W%W")

                            if key not in groups:
                                groups[key] = []
                            groups[key].append(commit)
                    except Exception:
                        pass

            # Convert to list
            timeline = []
            for key in sorted(groups.keys()):
                commits_in_group = groups[key]
                timeline.append(
                    {
                        "period": key,
                        "count": len(commits_in_group),
                        "commits": [c["short_hash"] for c in commits_in_group[:5]],
                    }
                )

            return timeline
        except Exception:
            return []

    def get_file_activity_timeline(self, file_path: str, days: int = 90) -> List[Dict[str, Any]]:
        """Get activity timeline for a specific file."""
        from memvcs.core.repository import Repository

        try:
            repo = Repository(self.repo_root)
            commits = repo.get_log(max_count=500)

            now = datetime.now(timezone.utc)
            cutoff = now - timedelta(days=days)

            # Filter to commits affecting this file
            # This is simplified - full implementation would check tree diffs
            timeline = []
            for commit in commits:
                ts = commit.get("timestamp", "")
                if ts:
                    try:
                        commit_time = datetime.fromisoformat(ts.replace("Z", "+00:00"))
                        if commit_time >= cutoff:
                            timeline.append(
                                {
                                    "timestamp": ts,
                                    "commit": commit["short_hash"],
                                    "message": commit.get("message", ""),
                                }
                            )
                    except Exception:
                        pass

            return timeline[:50]  # Limit results
        except Exception:
            return []


class SnapshotExporter:
    """Exports temporal snapshots in various formats."""

    def __init__(self, repo_root: Path):
        self.repo_root = Path(repo_root)

    def export_json(self, snapshot: TemporalSnapshot) -> str:
        """Export snapshot as JSON."""
        data = {
            "timestamp": snapshot.timestamp,
            "commit_hash": snapshot.commit_hash,
            "files": snapshot.files,
            "metadata": snapshot.metadata,
        }
        return json.dumps(data, indent=2)

    def export_markdown(self, snapshot: TemporalSnapshot) -> str:
        """Export snapshot as Markdown."""
        lines = [
            "# Memory Snapshot",
            "",
            f"**Time:** {snapshot.timestamp}",
            f"**Commit:** {snapshot.commit_hash}",
            "",
            f"## Files ({len(snapshot.files)})",
            "",
        ]

        for path, content in sorted(snapshot.files.items()):
            lines.append(f"### {path}")
            lines.append("```")
            lines.append(content[:500] + ("..." if len(content) > 500 else ""))
            lines.append("```")
            lines.append("")

        return "\n".join(lines)

    def export_archive(self, snapshot: TemporalSnapshot, output_dir: Path) -> Path:
        """Export snapshot as a file archive."""
        output_dir = Path(output_dir)
        archive_dir = output_dir / f"snapshot_{snapshot.commit_hash}"
        archive_dir.mkdir(parents=True, exist_ok=True)

        # Write files
        for path, content in snapshot.files.items():
            file_path = archive_dir / path
            file_path.parent.mkdir(parents=True, exist_ok=True)
            file_path.write_text(content)

        # Write metadata
        meta_file = archive_dir / "_snapshot_meta.json"
        meta_file.write_text(json.dumps(snapshot.metadata, indent=2))

        return archive_dir


# --- Dashboard Helper ---


def get_timetravel_dashboard(repo_root: Path) -> Dict[str, Any]:
    """Get data for time-travel dashboard."""
    navigator = TemporalNavigator(repo_root)
    visualizer = TimelineVisualizer(repo_root)

    timeline = visualizer.get_activity_timeline(days=30)

    return {
        "timeline": timeline,
        "timeline_days": 30,
        "total_commits": sum(t["count"] for t in timeline),
    }
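
A minimal usage sketch of the new timetravel API, stitched together from the classes in the diff above. The repository path ./agent-memory is a hypothetical placeholder, and the sketch assumes an initialized agmem repository whose memvcs.core.repository.Repository provides the get_log and get_file_content methods this module calls; it is illustrative, not part of the released package.

    from pathlib import Path

    from memvcs.core.timetravel import (
        SnapshotExporter,
        TemporalNavigator,
        TimeExpressionParser,
        get_timetravel_dashboard,
    )

    repo_root = Path("./agent-memory")  # hypothetical repository root

    # Resolve a natural-language expression to a concrete UTC timestamp.
    parsed = TimeExpressionParser().parse("2 days ago")
    print(parsed.resolved_time.isoformat(), parsed.is_relative)

    # Find the closest commit at that time and snapshot the memory state.
    navigator = TemporalNavigator(repo_root)
    snapshot = navigator.create_snapshot("2 days ago")
    if snapshot:
        exporter = SnapshotExporter(repo_root)
        print(exporter.export_markdown(snapshot).splitlines()[0])  # "# Memory Snapshot"

    # Summarize the last 30 days of activity for the dashboard view.
    dashboard = get_timetravel_dashboard(repo_root)
    print(dashboard["timeline_days"], dashboard["total_commits"])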