agmem 0.1.1-py3-none-any.whl → 0.1.2-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {agmem-0.1.1.dist-info → agmem-0.1.2.dist-info}/METADATA +20 -3
- agmem-0.1.2.dist-info/RECORD +86 -0
- memvcs/__init__.py +1 -1
- memvcs/cli.py +35 -31
- memvcs/commands/__init__.py +9 -9
- memvcs/commands/add.py +77 -76
- memvcs/commands/blame.py +46 -53
- memvcs/commands/branch.py +13 -33
- memvcs/commands/checkout.py +27 -32
- memvcs/commands/clean.py +18 -23
- memvcs/commands/clone.py +4 -1
- memvcs/commands/commit.py +40 -39
- memvcs/commands/daemon.py +81 -76
- memvcs/commands/decay.py +77 -0
- memvcs/commands/diff.py +56 -57
- memvcs/commands/distill.py +74 -0
- memvcs/commands/fsck.py +55 -61
- memvcs/commands/garden.py +28 -37
- memvcs/commands/graph.py +41 -48
- memvcs/commands/init.py +16 -24
- memvcs/commands/log.py +25 -40
- memvcs/commands/merge.py +16 -28
- memvcs/commands/pack.py +129 -0
- memvcs/commands/pull.py +4 -1
- memvcs/commands/push.py +4 -2
- memvcs/commands/recall.py +145 -0
- memvcs/commands/reflog.py +13 -22
- memvcs/commands/remote.py +1 -0
- memvcs/commands/repair.py +66 -0
- memvcs/commands/reset.py +23 -33
- memvcs/commands/resurrect.py +82 -0
- memvcs/commands/search.py +3 -4
- memvcs/commands/serve.py +2 -1
- memvcs/commands/show.py +66 -36
- memvcs/commands/stash.py +34 -34
- memvcs/commands/status.py +27 -35
- memvcs/commands/tag.py +23 -47
- memvcs/commands/test.py +30 -44
- memvcs/commands/timeline.py +111 -0
- memvcs/commands/tree.py +26 -27
- memvcs/commands/verify.py +59 -0
- memvcs/commands/when.py +115 -0
- memvcs/core/access_index.py +167 -0
- memvcs/core/config_loader.py +3 -1
- memvcs/core/consistency.py +214 -0
- memvcs/core/decay.py +185 -0
- memvcs/core/diff.py +158 -143
- memvcs/core/distiller.py +277 -0
- memvcs/core/gardener.py +164 -132
- memvcs/core/hooks.py +48 -14
- memvcs/core/knowledge_graph.py +134 -138
- memvcs/core/merge.py +248 -171
- memvcs/core/objects.py +95 -96
- memvcs/core/pii_scanner.py +147 -146
- memvcs/core/refs.py +132 -115
- memvcs/core/repository.py +174 -164
- memvcs/core/schema.py +155 -113
- memvcs/core/staging.py +60 -65
- memvcs/core/storage/__init__.py +20 -18
- memvcs/core/storage/base.py +74 -70
- memvcs/core/storage/gcs.py +70 -68
- memvcs/core/storage/local.py +42 -40
- memvcs/core/storage/s3.py +105 -110
- memvcs/core/temporal_index.py +112 -0
- memvcs/core/test_runner.py +101 -93
- memvcs/core/vector_store.py +41 -35
- memvcs/integrations/mcp_server.py +1 -3
- memvcs/integrations/web_ui/server.py +25 -26
- memvcs/retrieval/__init__.py +22 -0
- memvcs/retrieval/base.py +54 -0
- memvcs/retrieval/pack.py +128 -0
- memvcs/retrieval/recaller.py +105 -0
- memvcs/retrieval/strategies.py +314 -0
- memvcs/utils/__init__.py +3 -3
- memvcs/utils/helpers.py +52 -52
- agmem-0.1.1.dist-info/RECORD +0 -67
- {agmem-0.1.1.dist-info → agmem-0.1.2.dist-info}/WHEEL +0 -0
- {agmem-0.1.1.dist-info → agmem-0.1.2.dist-info}/entry_points.txt +0 -0
- {agmem-0.1.1.dist-info → agmem-0.1.2.dist-info}/licenses/LICENSE +0 -0
- {agmem-0.1.1.dist-info → agmem-0.1.2.dist-info}/top_level.txt +0 -0
memvcs/core/staging.py
CHANGED
@@ -15,6 +15,7 @@ from dataclasses import dataclass, asdict
 @dataclass
 class StagedFile:
     """Represents a file in the staging area."""
+
     path: str  # Relative path from current/
     blob_hash: str
     mode: int = 0o100644  # Regular file
@@ -35,14 +36,14 @@ def _path_under_root(relative_path: str, root: Path) -> Optional[Path]:

 class StagingArea:
     """Manages the staging area for memory commits."""
-
+
     def __init__(self, mem_dir: Path):
         self.mem_dir = Path(mem_dir)
-        self.staging_dir = self.mem_dir / 'staging'
-        self.index_file = self.mem_dir / 'index.json'
+        self.staging_dir = self.mem_dir / "staging"
+        self.index_file = self.mem_dir / "index.json"
         self._index: Dict[str, StagedFile] = {}
         self._load_index()
-
+
     def _load_index(self):
         """Load the staging index from disk."""
         if self.index_file.exists():
@@ -52,72 +53,62 @@ class StagingArea:
                     if _path_under_root(path, self.staging_dir) is None:
                         continue
                     self._index[path] = StagedFile(
-                        path=path,
-                        blob_hash=info['blob_hash'],
-                        mode=info.get('mode', 0o100644)
+                        path=path, blob_hash=info["blob_hash"], mode=info.get("mode", 0o100644)
                     )
             except (json.JSONDecodeError, KeyError):
                 self._index = {}
-
+
     def _save_index(self):
         """Save the staging index to disk."""
         data = {
-            path: {
-                'blob_hash': sf.blob_hash,
-                'mode': sf.mode
-            }
-            for path, sf in self._index.items()
+            path: {"blob_hash": sf.blob_hash, "mode": sf.mode} for path, sf in self._index.items()
         }
         self.index_file.write_text(json.dumps(data, indent=2))
-
+
     def add(self, filepath: str, blob_hash: str, content: bytes, mode: int = 0o100644):
         """
         Add a file to the staging area.
-
+
         Args:
             filepath: Relative path from current/
             blob_hash: Hash of the blob object
            content: File content bytes
            mode: File mode (default 0o100644 for regular file)
-
+
         Raises:
             ValueError: If filepath escapes staging directory (path traversal)
         """
         staging_path = _path_under_root(filepath, self.staging_dir)
         if staging_path is None:
             raise ValueError(f"Path escapes staging area: {filepath}")
-
-        self._index[filepath] = StagedFile(
-            path=filepath,
-            blob_hash=blob_hash,
-            mode=mode
-        )
-
+
+        self._index[filepath] = StagedFile(path=filepath, blob_hash=blob_hash, mode=mode)
+
         staging_path.parent.mkdir(parents=True, exist_ok=True)
         staging_path.write_bytes(content)
-
+
         self._save_index()
-
+
     def remove(self, filepath: str) -> bool:
         """
         Remove a file from the staging area.
-
+
         Returns:
             True if file was in staging, False otherwise
         """
         if filepath in self._index:
             del self._index[filepath]
-
+
             staging_path = _path_under_root(filepath, self.staging_dir)
             if staging_path is not None and staging_path.exists():
                 staging_path.unlink()
                 # Clean up empty directories
                 self._cleanup_empty_dirs(staging_path.parent)
-
+
             self._save_index()
             return True
         return False
-
+
     def _cleanup_empty_dirs(self, dir_path: Path):
         """Remove empty directories up to staging root."""
         try:
@@ -129,99 +120,103 @@ class StagingArea:
                 break
         except OSError:
             pass
-
+
     def get_staged_files(self) -> Dict[str, StagedFile]:
         """Get all staged files."""
         return dict(self._index)
-
+
     def is_staged(self, filepath: str) -> bool:
         """Check if a file is staged."""
         return filepath in self._index
-
+
     def get_blob_hash(self, filepath: str) -> Optional[str]:
         """Get the blob hash for a staged file."""
         if filepath in self._index:
             return self._index[filepath].blob_hash
         return None
-
+
     def clear(self):
         """Clear the entire staging area."""
         self._index = {}
-
+
         # Remove staging directory contents
         if self.staging_dir.exists():
             shutil.rmtree(self.staging_dir)
         self.staging_dir.mkdir(parents=True, exist_ok=True)
-
+
         # Remove index file
         if self.index_file.exists():
             self.index_file.unlink()
-
+
     def get_status(self) -> Dict[str, List[str]]:
         """
         Get staging status.
-
+
         Returns:
             Dict with 'staged', 'modified', 'deleted', 'untracked' lists
         """
         staged = list(self._index.keys())
-
+
         return {
-            'staged': staged,
-            'modified': [],  # TODO: Compare with working directory
-            'deleted': [],  # TODO: Check if files were deleted
-            'untracked': []  # TODO: Find untracked files
+            "staged": staged,
+            "modified": [],  # TODO: Compare with working directory
+            "deleted": [],  # TODO: Check if files were deleted
+            "untracked": [],  # TODO: Find untracked files
         }
-
+
     def get_tree_entries(self) -> List[Dict]:
         """
         Get tree entries for creating a tree object.
-
+
         Returns:
             List of entry dictionaries for Tree creation
         """
         entries = []
         for path, sf in self._index.items():
-            entries.append({
-                'mode': oct(sf.mode)[2:],  # Convert to string like '100644'
-                'type': 'blob',
-                'hash': sf.blob_hash,
-                'name': Path(path).name,
-                'path': str(Path(path).parent) if str(Path(path).parent) != '.' else ''
-            })
+            entries.append(
+                {
+                    "mode": oct(sf.mode)[2:],  # Convert to string like '100644'
+                    "type": "blob",
+                    "hash": sf.blob_hash,
+                    "name": Path(path).name,
+                    "path": str(Path(path).parent) if str(Path(path).parent) != "." else "",
+                }
+            )
         return entries
-
+
     def diff_with_head(self, repo) -> Dict[str, Dict]:
         """
         Compare staging area with HEAD commit.
-
+
         Returns:
             Dict mapping file paths to change info
         """
         changes = {}
-
+
         # Get HEAD tree
         head_commit = repo.get_head_commit()
         if head_commit:
-            head_tree_bytes = repo.object_store.retrieve(head_commit.tree, 'tree')
+            head_tree_bytes = repo.object_store.retrieve(head_commit.tree, "tree")
             if head_tree_bytes:
-                head_data = json.loads(head_tree_bytes.decode('utf-8'))
-                head_entries = {e['path'] + '/' + e['name'] if e['path'] else e['name']: e
-                                for e in head_data.get('entries', [])}
+                head_data = json.loads(head_tree_bytes.decode("utf-8"))
+                head_entries = {
+                    e["path"] + "/" + e["name"] if e["path"] else e["name"]: e
+                    for e in head_data.get("entries", [])
+                }
         else:
             head_entries = {}
-
+
         # Compare with staging
         for path, sf in self._index.items():
             if path in head_entries:
-                if head_entries[path]['hash'] != sf.blob_hash:
-                    changes[path] = {'status': 'modified', 'blob_hash': sf.blob_hash}
+                if head_entries[path]["hash"] != sf.blob_hash:
+                    changes[path] = {"status": "modified", "blob_hash": sf.blob_hash}
             else:
-                changes[path] = {'status': 'added', 'blob_hash': sf.blob_hash}
-
+                changes[path] = {"status": "added", "blob_hash": sf.blob_hash}
+
         # Check for deleted files
         for path in head_entries:
             if path not in self._index:
-                changes[path] = {'status': 'deleted'}
-
+                changes[path] = {"status": "deleted"}
+
         return changes
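For orientation, a minimal usage sketch of the StagingArea API shown above. It is illustrative only: the temporary .mem-style directory and the sha256 hash are assumptions made for this example (real blob hashes presumably come from memvcs's object store), but every call matches a signature visible in this diff.

    import hashlib
    from pathlib import Path

    from memvcs.core.staging import StagingArea

    mem_dir = Path("/tmp/agmem-demo/.mem")  # hypothetical metadata directory
    mem_dir.mkdir(parents=True, exist_ok=True)

    staging = StagingArea(mem_dir)  # loads index.json if it already exists

    content = b"User prefers concise answers.\n"
    blob_hash = hashlib.sha256(content).hexdigest()  # stand-in for the real blob hash

    # add() validates the path against traversal, copies the bytes under
    # <mem_dir>/staging/, and records the entry in <mem_dir>/index.json.
    staging.add("preferences/style.md", blob_hash, content)

    print(staging.is_staged("preferences/style.md"))  # True
    print(staging.get_status()["staged"])             # ['preferences/style.md']
    print(staging.get_tree_entries())                 # entries ready for a tree object

    staging.remove("preferences/style.md")  # unstage and delete the staged copy
    staging.clear()                         # wipe the staging directory and index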
memvcs/core/storage/__init__.py
CHANGED
@@ -10,22 +10,24 @@ from .base import StorageAdapter, StorageError, LockError
 from .local import LocalStorageAdapter

 __all__ = [
-    'StorageAdapter',
-    'StorageError',
-    'LockError',
-    'LocalStorageAdapter',
+    "StorageAdapter",
+    "StorageError",
+    "LockError",
+    "LocalStorageAdapter",
 ]

 # Try to import optional cloud adapters
 try:
     from .s3 import S3StorageAdapter
-    __all__.append('S3StorageAdapter')
+
+    __all__.append("S3StorageAdapter")
 except ImportError:
     pass

 try:
     from .gcs import GCSStorageAdapter
-    __all__.append('GCSStorageAdapter')
+
+    __all__.append("GCSStorageAdapter")
 except ImportError:
     pass

@@ -33,40 +35,40 @@ except ImportError:
 def get_adapter(url: str, config: Optional[dict] = None) -> StorageAdapter:
     """
     Get the appropriate storage adapter for a URL.
-
+
     Args:
         url: Storage URL (file://, s3://, gs://)
         config: Optional agmem config dict (from load_agmem_config). Used for
             S3/GCS credentials and options; credentials resolved from env only.
-
+
     Returns:
         Appropriate StorageAdapter instance
-
+
     Raises:
         ValueError: If URL scheme is not supported
     """
-    if url.startswith('file://'):
+    if url.startswith("file://"):
         path = url[7:]  # Remove 'file://' prefix
         return LocalStorageAdapter(path)
-
-    elif url.startswith('s3://'):
+
+    elif url.startswith("s3://"):
         try:
             from .s3 import S3StorageAdapter
+
             return S3StorageAdapter.from_url(url, config=config)
         except ImportError:
-            raise ImportError(
-                "S3 storage requires boto3. Install with: pip install agmem[cloud]"
-            )
-
-    elif url.startswith('gs://'):
+            raise ImportError("S3 storage requires boto3. Install with: pip install agmem[cloud]")
+
+    elif url.startswith("gs://"):
         try:
             from .gcs import GCSStorageAdapter
+
             return GCSStorageAdapter.from_url(url, config=config)
         except ImportError:
             raise ImportError(
                 "GCS storage requires google-cloud-storage. Install with: pip install agmem[cloud]"
             )
-
+
     else:
         # Assume local path
         return LocalStorageAdapter(url)