agmem 0.1.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- agmem-0.1.1.dist-info/METADATA +656 -0
- agmem-0.1.1.dist-info/RECORD +67 -0
- agmem-0.1.1.dist-info/WHEEL +5 -0
- agmem-0.1.1.dist-info/entry_points.txt +2 -0
- agmem-0.1.1.dist-info/licenses/LICENSE +21 -0
- agmem-0.1.1.dist-info/top_level.txt +1 -0
- memvcs/__init__.py +9 -0
- memvcs/cli.py +178 -0
- memvcs/commands/__init__.py +23 -0
- memvcs/commands/add.py +258 -0
- memvcs/commands/base.py +23 -0
- memvcs/commands/blame.py +169 -0
- memvcs/commands/branch.py +110 -0
- memvcs/commands/checkout.py +101 -0
- memvcs/commands/clean.py +76 -0
- memvcs/commands/clone.py +91 -0
- memvcs/commands/commit.py +174 -0
- memvcs/commands/daemon.py +267 -0
- memvcs/commands/diff.py +157 -0
- memvcs/commands/fsck.py +203 -0
- memvcs/commands/garden.py +107 -0
- memvcs/commands/graph.py +151 -0
- memvcs/commands/init.py +61 -0
- memvcs/commands/log.py +103 -0
- memvcs/commands/mcp.py +59 -0
- memvcs/commands/merge.py +88 -0
- memvcs/commands/pull.py +65 -0
- memvcs/commands/push.py +143 -0
- memvcs/commands/reflog.py +52 -0
- memvcs/commands/remote.py +51 -0
- memvcs/commands/reset.py +98 -0
- memvcs/commands/search.py +163 -0
- memvcs/commands/serve.py +54 -0
- memvcs/commands/show.py +125 -0
- memvcs/commands/stash.py +97 -0
- memvcs/commands/status.py +112 -0
- memvcs/commands/tag.py +117 -0
- memvcs/commands/test.py +132 -0
- memvcs/commands/tree.py +156 -0
- memvcs/core/__init__.py +21 -0
- memvcs/core/config_loader.py +245 -0
- memvcs/core/constants.py +12 -0
- memvcs/core/diff.py +380 -0
- memvcs/core/gardener.py +466 -0
- memvcs/core/hooks.py +151 -0
- memvcs/core/knowledge_graph.py +381 -0
- memvcs/core/merge.py +474 -0
- memvcs/core/objects.py +323 -0
- memvcs/core/pii_scanner.py +343 -0
- memvcs/core/refs.py +447 -0
- memvcs/core/remote.py +278 -0
- memvcs/core/repository.py +522 -0
- memvcs/core/schema.py +414 -0
- memvcs/core/staging.py +227 -0
- memvcs/core/storage/__init__.py +72 -0
- memvcs/core/storage/base.py +359 -0
- memvcs/core/storage/gcs.py +308 -0
- memvcs/core/storage/local.py +182 -0
- memvcs/core/storage/s3.py +369 -0
- memvcs/core/test_runner.py +371 -0
- memvcs/core/vector_store.py +313 -0
- memvcs/integrations/__init__.py +5 -0
- memvcs/integrations/mcp_server.py +267 -0
- memvcs/integrations/web_ui/__init__.py +1 -0
- memvcs/integrations/web_ui/server.py +352 -0
- memvcs/utils/__init__.py +9 -0
- memvcs/utils/helpers.py +178 -0
|
@@ -0,0 +1,72 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Storage adapters for agmem.
|
|
3
|
+
|
|
4
|
+
Provides abstraction layer for different storage backends.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
from typing import Optional
|
|
8
|
+
|
|
9
|
+
from .base import StorageAdapter, StorageError, LockError
|
|
10
|
+
from .local import LocalStorageAdapter
|
|
11
|
+
|
|
12
|
+
__all__ = [
|
|
13
|
+
'StorageAdapter',
|
|
14
|
+
'StorageError',
|
|
15
|
+
'LockError',
|
|
16
|
+
'LocalStorageAdapter',
|
|
17
|
+
]
|
|
18
|
+
|
|
19
|
+
# Try to import optional cloud adapters
|
|
20
|
+
try:
|
|
21
|
+
from .s3 import S3StorageAdapter
|
|
22
|
+
__all__.append('S3StorageAdapter')
|
|
23
|
+
except ImportError:
|
|
24
|
+
pass
|
|
25
|
+
|
|
26
|
+
try:
|
|
27
|
+
from .gcs import GCSStorageAdapter
|
|
28
|
+
__all__.append('GCSStorageAdapter')
|
|
29
|
+
except ImportError:
|
|
30
|
+
pass
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
def get_adapter(url: str, config: Optional[dict] = None) -> StorageAdapter:
    """
    Get the appropriate storage adapter for a URL.

    Args:
        url: Storage URL (file://, s3://, gs://). Any other value is
            treated as a plain local filesystem path.
        config: Optional agmem config dict (from load_agmem_config). Used for
            S3/GCS credentials and options; credentials resolved from env only.

    Returns:
        Appropriate StorageAdapter instance

    Raises:
        ImportError: If the URL needs an optional cloud backend whose
            dependency (boto3 / google-cloud-storage) is not installed.
    """
    if url.startswith('file://'):
        # Strip the scheme; the remainder is a local filesystem path.
        return LocalStorageAdapter(url[len('file://'):])

    if url.startswith('s3://'):
        # Keep the try narrow: only the import is guarded, so an
        # ImportError raised *inside* from_url() is not misreported
        # as a missing boto3 dependency.
        try:
            from .s3 import S3StorageAdapter
        except ImportError as err:
            raise ImportError(
                "S3 storage requires boto3. Install with: pip install agmem[cloud]"
            ) from err
        return S3StorageAdapter.from_url(url, config=config)

    if url.startswith('gs://'):
        try:
            from .gcs import GCSStorageAdapter
        except ImportError as err:
            raise ImportError(
                "GCS storage requires google-cloud-storage. Install with: pip install agmem[cloud]"
            ) from err
        return GCSStorageAdapter.from_url(url, config=config)

    # No recognized scheme: assume a bare local path.
    return LocalStorageAdapter(url)
|
|
@@ -0,0 +1,359 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Base storage adapter interface for agmem.
|
|
3
|
+
|
|
4
|
+
Defines the abstract interface that all storage backends must implement.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
from abc import ABC, abstractmethod
|
|
8
|
+
from typing import List, Optional, Iterator
|
|
9
|
+
from dataclasses import dataclass
|
|
10
|
+
from pathlib import Path
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
class StorageError(Exception):
    """Base exception raised for failures in storage backend operations."""
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
class LockError(StorageError):
    """Raised when a storage lock cannot be acquired."""
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
@dataclass
class FileInfo:
    """Metadata describing a single entry (file or directory) in storage."""

    # Path of the entry, relative to the storage root.
    path: str
    # Size of the entry in bytes.
    size: int
    # Last-modified time as an ISO 8601 string, when the backend reports one.
    modified: Optional[str] = None
    # True when the entry is a directory rather than a regular file.
    is_dir: bool = False
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
class StorageAdapter(ABC):
    """
    Abstract interface shared by all storage backends.

    Concrete adapters (local filesystem, S3, GCS, ...) implement the
    abstract primitives below. The convenience helpers at the bottom are
    built purely on those primitives and may be overridden for efficiency.
    """

    @abstractmethod
    def read_file(self, path: str) -> bytes:
        """
        Return the contents of *path* (relative to the storage root) as bytes.

        Raises:
            StorageError: If the file does not exist or cannot be read.
        """

    @abstractmethod
    def write_file(self, path: str, data: bytes) -> None:
        """
        Write *data* to *path* (relative to the storage root).

        Raises:
            StorageError: If the file cannot be written.
        """

    @abstractmethod
    def exists(self, path: str) -> bool:
        """Return True if *path* exists, False otherwise."""

    @abstractmethod
    def delete(self, path: str) -> bool:
        """Delete the file at *path*; return True if deleted, False if not found."""

    @abstractmethod
    def list_dir(self, path: str = "") -> List[FileInfo]:
        """Return FileInfo entries for the directory *path* ("" for the root)."""

    @abstractmethod
    def makedirs(self, path: str) -> None:
        """Create directory *path* and any missing parent directories."""

    @abstractmethod
    def is_dir(self, path: str) -> bool:
        """Return True if *path* is a directory."""

    # --- Lock management -------------------------------------------------

    @abstractmethod
    def acquire_lock(self, lock_name: str, timeout: int = 30) -> bool:
        """
        Acquire the distributed lock *lock_name*, waiting up to *timeout* seconds.

        Returns:
            True if the lock was acquired.

        Raises:
            LockError: If the lock cannot be acquired within *timeout*.
        """

    @abstractmethod
    def release_lock(self, lock_name: str) -> None:
        """Release the distributed lock *lock_name*."""

    @abstractmethod
    def is_locked(self, lock_name: str) -> bool:
        """Return True if the lock *lock_name* is currently held."""

    # --- Convenience helpers (override for efficiency) -------------------

    def read_text(self, path: str, encoding: str = 'utf-8') -> str:
        """Read *path* and decode its contents as text."""
        return self.read_file(path).decode(encoding)

    def write_text(self, path: str, text: str, encoding: str = 'utf-8') -> None:
        """Encode *text* and write it to *path*."""
        self.write_file(path, text.encode(encoding))

    def walk(self, path: str = "") -> Iterator[tuple]:
        """
        Recursively walk the directory tree rooted at *path*, top-down.

        Yields:
            Tuples of (dirpath, dirnames, filenames).
        """
        entries = self.list_dir(path)
        # list_dir may return full paths; keep only the final component.
        subdirs = [e.path.split('/')[-1] for e in entries if e.is_dir]
        names = [e.path.split('/')[-1] for e in entries if not e.is_dir]

        yield (path, subdirs, names)

        for sub in subdirs:
            child = f"{path}/{sub}" if path else sub
            yield from self.walk(child)

    def copy_file(self, src: str, dst: str) -> None:
        """Copy *src* to *dst* within this storage."""
        self.write_file(dst, self.read_file(src))

    def move_file(self, src: str, dst: str) -> None:
        """Move *src* to *dst* (copy, then delete the original)."""
        self.copy_file(src, dst)
        self.delete(src)
|
+
|
|
219
|
+
class CachingStorageAdapter(StorageAdapter):
    """
    Storage adapter that caches remote operations locally.

    Wraps a remote StorageAdapter and keeps local on-disk copies of files
    to minimize network requests for cloud backends. Writes land in the
    cache first and are tracked as "dirty" until pushed via sync_to_remote().

    NOTE: list_dir() and the lock methods always consult the remote, so
    locally written (dirty) files do not appear in listings until synced.
    """

    def __init__(self, remote: StorageAdapter, cache_dir: str):
        """
        Initialize caching adapter.

        Args:
            remote: Remote storage adapter to wrap.
            cache_dir: Local directory used for cached copies.
        """
        self.remote = remote
        self.cache_dir = Path(cache_dir)
        self.cache_dir.mkdir(parents=True, exist_ok=True)
        self._dirty: set = set()  # Paths written locally, not yet pushed

    def _cache_path(self, path: str) -> Path:
        """Get the local cache path for a remote path."""
        return self.cache_dir / path

    def _fetch_to_cache(self, path: str) -> None:
        """Download one remote file into the local cache."""
        data = self.remote.read_file(path)
        cache_path = self._cache_path(path)
        cache_path.parent.mkdir(parents=True, exist_ok=True)
        cache_path.write_bytes(data)

    def read_file(self, path: str) -> bytes:
        """Read from cache, fetching from remote on a cache miss."""
        cache_path = self._cache_path(path)
        if not cache_path.exists():
            self._fetch_to_cache(path)
        return cache_path.read_bytes()

    def write_file(self, path: str, data: bytes) -> None:
        """Write to the cache and mark the path as dirty (needs push)."""
        cache_path = self._cache_path(path)
        cache_path.parent.mkdir(parents=True, exist_ok=True)
        cache_path.write_bytes(data)
        self._dirty.add(path)

    def exists(self, path: str) -> bool:
        """Check if path exists in the cache or on the remote."""
        return self._cache_path(path).exists() or self.remote.exists(path)

    def delete(self, path: str) -> bool:
        """Delete from cache and remote; return the remote's result."""
        cache_path = self._cache_path(path)
        if cache_path.exists():
            cache_path.unlink()
        # Idempotent either way; a deleted file no longer needs pushing.
        self._dirty.discard(path)
        return self.remote.delete(path)

    def list_dir(self, path: str = "") -> List[FileInfo]:
        """List directory contents from the remote (cache not consulted)."""
        return self.remote.list_dir(path)

    def makedirs(self, path: str) -> None:
        """Create directory (and parents) in the local cache."""
        self._cache_path(path).mkdir(parents=True, exist_ok=True)

    def is_dir(self, path: str) -> bool:
        """Check if path is a directory, preferring the cached entry."""
        cache_path = self._cache_path(path)
        if cache_path.exists():
            return cache_path.is_dir()
        return self.remote.is_dir(path)

    def acquire_lock(self, lock_name: str, timeout: int = 30) -> bool:
        """Acquire lock on the remote (locks are never cached)."""
        return self.remote.acquire_lock(lock_name, timeout)

    def release_lock(self, lock_name: str) -> None:
        """Release lock on the remote."""
        self.remote.release_lock(lock_name)

    def is_locked(self, lock_name: str) -> bool:
        """Check if lock is held on the remote."""
        return self.remote.is_locked(lock_name)

    def sync_to_remote(self) -> int:
        """
        Push all dirty (locally written) files to the remote.

        Returns:
            Number of files synced.
        """
        count = 0
        for path in list(self._dirty):
            cache_path = self._cache_path(path)
            if cache_path.exists():
                self.remote.write_file(path, cache_path.read_bytes())
                count += 1
            self._dirty.discard(path)
        return count

    def sync_from_remote(self, paths: Optional[List[str]] = None) -> int:
        """
        Pull files from the remote into the local cache.

        Args:
            paths: Specific paths to sync, or None to mirror the entire remote.

        Returns:
            Number of files synced.
        """
        if paths is None:
            # Mirror the whole remote tree into the cache.
            count = 0
            for dirpath, _, filenames in self.remote.walk():
                for filename in filenames:
                    # BUG FIX: previously interpolated the literal string
                    # "(unknown)" instead of the filename, so every file in
                    # a subdirectory was fetched from a nonexistent path.
                    path = f"{dirpath}/{filename}" if dirpath else filename
                    self._fetch_to_cache(path)
                    count += 1
            return count

        for path in paths:
            self._fetch_to_cache(path)
        return len(paths)

    def get_dirty_paths(self) -> List[str]:
        """Get the list of paths that still need to be pushed."""
        return list(self._dirty)

    def clear_cache(self) -> None:
        """Delete and recreate the local cache directory; reset dirty state."""
        import shutil
        if self.cache_dir.exists():
            shutil.rmtree(self.cache_dir)
        self.cache_dir.mkdir(parents=True, exist_ok=True)
        self._dirty.clear()