agmem 0.1.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (67)
  1. agmem-0.1.1.dist-info/METADATA +656 -0
  2. agmem-0.1.1.dist-info/RECORD +67 -0
  3. agmem-0.1.1.dist-info/WHEEL +5 -0
  4. agmem-0.1.1.dist-info/entry_points.txt +2 -0
  5. agmem-0.1.1.dist-info/licenses/LICENSE +21 -0
  6. agmem-0.1.1.dist-info/top_level.txt +1 -0
  7. memvcs/__init__.py +9 -0
  8. memvcs/cli.py +178 -0
  9. memvcs/commands/__init__.py +23 -0
  10. memvcs/commands/add.py +258 -0
  11. memvcs/commands/base.py +23 -0
  12. memvcs/commands/blame.py +169 -0
  13. memvcs/commands/branch.py +110 -0
  14. memvcs/commands/checkout.py +101 -0
  15. memvcs/commands/clean.py +76 -0
  16. memvcs/commands/clone.py +91 -0
  17. memvcs/commands/commit.py +174 -0
  18. memvcs/commands/daemon.py +267 -0
  19. memvcs/commands/diff.py +157 -0
  20. memvcs/commands/fsck.py +203 -0
  21. memvcs/commands/garden.py +107 -0
  22. memvcs/commands/graph.py +151 -0
  23. memvcs/commands/init.py +61 -0
  24. memvcs/commands/log.py +103 -0
  25. memvcs/commands/mcp.py +59 -0
  26. memvcs/commands/merge.py +88 -0
  27. memvcs/commands/pull.py +65 -0
  28. memvcs/commands/push.py +143 -0
  29. memvcs/commands/reflog.py +52 -0
  30. memvcs/commands/remote.py +51 -0
  31. memvcs/commands/reset.py +98 -0
  32. memvcs/commands/search.py +163 -0
  33. memvcs/commands/serve.py +54 -0
  34. memvcs/commands/show.py +125 -0
  35. memvcs/commands/stash.py +97 -0
  36. memvcs/commands/status.py +112 -0
  37. memvcs/commands/tag.py +117 -0
  38. memvcs/commands/test.py +132 -0
  39. memvcs/commands/tree.py +156 -0
  40. memvcs/core/__init__.py +21 -0
  41. memvcs/core/config_loader.py +245 -0
  42. memvcs/core/constants.py +12 -0
  43. memvcs/core/diff.py +380 -0
  44. memvcs/core/gardener.py +466 -0
  45. memvcs/core/hooks.py +151 -0
  46. memvcs/core/knowledge_graph.py +381 -0
  47. memvcs/core/merge.py +474 -0
  48. memvcs/core/objects.py +323 -0
  49. memvcs/core/pii_scanner.py +343 -0
  50. memvcs/core/refs.py +447 -0
  51. memvcs/core/remote.py +278 -0
  52. memvcs/core/repository.py +522 -0
  53. memvcs/core/schema.py +414 -0
  54. memvcs/core/staging.py +227 -0
  55. memvcs/core/storage/__init__.py +72 -0
  56. memvcs/core/storage/base.py +359 -0
  57. memvcs/core/storage/gcs.py +308 -0
  58. memvcs/core/storage/local.py +182 -0
  59. memvcs/core/storage/s3.py +369 -0
  60. memvcs/core/test_runner.py +371 -0
  61. memvcs/core/vector_store.py +313 -0
  62. memvcs/integrations/__init__.py +5 -0
  63. memvcs/integrations/mcp_server.py +267 -0
  64. memvcs/integrations/web_ui/__init__.py +1 -0
  65. memvcs/integrations/web_ui/server.py +352 -0
  66. memvcs/utils/__init__.py +9 -0
  67. memvcs/utils/helpers.py +178 -0
@@ -0,0 +1,308 @@
1
+ """
2
+ Google Cloud Storage adapter for agmem.
3
+
4
+ Credentials from config: credentials_path (validated) or credentials_info (dict
5
+ from env var containing JSON). Never store secret values in config.
6
+ """
7
+
8
+ import time
9
+ import uuid
10
+ from typing import Any, Dict, List, Optional
11
+ from datetime import datetime
12
+
13
+ try:
14
+ from google.cloud import storage
15
+ from google.cloud.exceptions import NotFound
16
+ GCS_AVAILABLE = True
17
+ except ImportError:
18
+ GCS_AVAILABLE = False
19
+
20
+ from .base import StorageAdapter, StorageError, LockError, FileInfo
21
+
22
+
23
+ def _apply_gcs_config(kwargs: Dict[str, Any], config: Optional[Dict[str, Any]]) -> None:
24
+ """Merge GCS options from agmem config into kwargs; credentials from env or validated path."""
25
+ if not config:
26
+ return
27
+ try:
28
+ from memvcs.core.config_loader import get_gcs_options_from_config
29
+ opts = get_gcs_options_from_config(config)
30
+ for key in ("project", "credentials_path", "credentials_info"):
31
+ if opts.get(key) is not None:
32
+ kwargs[key] = opts[key]
33
+ except ImportError:
34
+ pass
35
+
36
+
37
class GCSStorageAdapter(StorageAdapter):
    """Storage adapter for Google Cloud Storage.

    Objects live in a single bucket under an optional key prefix.
    "Directories" are implicit key prefixes. Distributed locking is
    best-effort: a lock object holding "<owner_id>:<timestamp>" is written
    and then re-read to verify ownership; locks older than 300 seconds are
    treated as stale.
    """

    def __init__(
        self,
        bucket: str,
        prefix: str = "",
        project: Optional[str] = None,
        credentials_path: Optional[str] = None,
        credentials_info: Optional[Dict[str, Any]] = None
    ):
        """
        Initialize GCS storage adapter.

        Args:
            bucket: GCS bucket name
            prefix: Key prefix for all operations
            project: GCP project ID
            credentials_path: Path to service account JSON file
            credentials_info: Service account info dict (e.g. from env JSON)

        Raises:
            ImportError: If google-cloud-storage is not installed.
        """
        if not GCS_AVAILABLE:
            raise ImportError(
                "google-cloud-storage is required for GCS. "
                "Install with: pip install agmem[cloud]"
            )

        self.bucket_name = bucket
        self.prefix = prefix.strip('/')
        # Unique identifier for this adapter instance as a lock owner.
        self._lock_id = str(uuid.uuid4())

        # Credential precedence: info dict > key file path > project > default ADC.
        if credentials_info:
            self.client = storage.Client.from_service_account_info(credentials_info)
        elif credentials_path:
            self.client = storage.Client.from_service_account_json(credentials_path)
        elif project:
            self.client = storage.Client(project=project)
        else:
            self.client = storage.Client()

        self.bucket = self.client.bucket(bucket)

    @classmethod
    def from_url(cls, url: str, config: Optional[Dict[str, Any]] = None) -> 'GCSStorageAdapter':
        """
        Create adapter from GCS URL. Optional config supplies project,
        credentials_path (validated), or credentials_info from env JSON.

        Args:
            url: GCS URL (gs://bucket/prefix)
            config: Optional agmem config dict (cloud.gcs)

        Returns:
            GCSStorageAdapter instance

        Raises:
            ValueError: If the URL does not start with 'gs://'.
        """
        if not url.startswith('gs://'):
            raise ValueError(f"Invalid GCS URL: {url}")
        path = url[5:]  # Remove 'gs://'
        parts = path.split('/', 1)
        bucket = parts[0]
        prefix = parts[1] if len(parts) > 1 else ""
        kwargs: Dict[str, Any] = {"bucket": bucket, "prefix": prefix}
        _apply_gcs_config(kwargs, config)
        return cls(**kwargs)

    def _key(self, path: str) -> str:
        """Convert a relative path to a full GCS object key."""
        if not path:
            return self.prefix
        if self.prefix:
            return f"{self.prefix}/{path}"
        return path

    def _path(self, key: str) -> str:
        """Convert a full GCS object key back to a prefix-relative path."""
        if self.prefix and key.startswith(self.prefix + '/'):
            return key[len(self.prefix) + 1:]
        return key

    def read_file(self, path: str) -> bytes:
        """Read an object's contents from GCS.

        Raises:
            StorageError: If the object is missing or the download fails.
        """
        key = self._key(path)
        blob = self.bucket.blob(key)

        try:
            return blob.download_as_bytes()
        except NotFound:
            raise StorageError(f"File not found: {path}") from None
        except Exception as e:
            raise StorageError(f"Error reading {path}: {e}") from e

    def write_file(self, path: str, data: bytes) -> None:
        """Write data to an object in GCS (overwrites any existing object).

        Raises:
            StorageError: If the upload fails.
        """
        key = self._key(path)
        blob = self.bucket.blob(key)

        try:
            blob.upload_from_string(data)
        except Exception as e:
            raise StorageError(f"Error writing {path}: {e}") from e

    def exists(self, path: str) -> bool:
        """Check if a key exists in GCS, as an object or as a "directory" prefix."""
        key = self._key(path)
        blob = self.bucket.blob(key)

        if blob.exists():
            return True

        # No object at the exact key: check for any objects under key/ (implicit dir).
        prefix = key + '/' if key else ''
        blobs = list(self.bucket.list_blobs(prefix=prefix, max_results=1))
        return len(blobs) > 0

    def delete(self, path: str) -> bool:
        """Delete an object from GCS.

        Returns:
            True if the object was deleted, False if it did not exist.

        Raises:
            StorageError: On any other GCS error.
        """
        key = self._key(path)
        blob = self.bucket.blob(key)

        try:
            blob.delete()
            return True
        except NotFound:
            return False
        except Exception as e:
            raise StorageError(f"Error deleting {path}: {e}") from e

    def list_dir(self, path: str = "") -> List[FileInfo]:
        """List the immediate contents of a "directory" (key prefix) in GCS.

        Raises:
            StorageError: If the listing fails.
        """
        prefix = self._key(path)
        if prefix and not prefix.endswith('/'):
            prefix += '/'

        result = []
        seen_dirs = set()

        try:
            # delimiter='/' groups deeper keys into blobs.prefixes ("directories").
            blobs = self.bucket.list_blobs(prefix=prefix, delimiter='/')

            # Files directly under the prefix. Note: the blob iterator must be
            # consumed before blobs.prefixes is populated.
            for blob in blobs:
                if blob.name == prefix:
                    continue  # Skip a placeholder object at the prefix itself

                result.append(FileInfo(
                    path=self._path(blob.name),
                    size=blob.size or 0,
                    modified=blob.updated.isoformat() if blob.updated else None,
                    is_dir=False
                ))

            # Sub-"directories" reported via the delimiter.
            for dir_prefix in blobs.prefixes:
                dir_name = dir_prefix.rstrip('/').split('/')[-1]
                if dir_name not in seen_dirs:
                    seen_dirs.add(dir_name)
                    result.append(FileInfo(
                        path=self._path(dir_prefix.rstrip('/')),
                        size=0,
                        is_dir=True
                    ))

        except Exception as e:
            raise StorageError(f"Error listing {path}: {e}") from e

        return result

    def makedirs(self, path: str) -> None:
        """Create a "directory" in GCS (no-op: directories are implicit prefixes)."""
        pass

    def is_dir(self, path: str) -> bool:
        """Check if path is a "directory" (i.e. has at least one key under it)."""
        key = self._key(path)
        if not key:
            return True  # Root is always a directory

        # A "directory" exists iff any object lives under key/.
        prefix = key + '/'
        blobs = list(self.bucket.list_blobs(prefix=prefix, max_results=1))
        return len(blobs) > 0

    def acquire_lock(self, lock_name: str, timeout: int = 30) -> bool:
        """
        Acquire a distributed lock using a GCS lock object.

        Best-effort write-then-verify scheme: upload "<owner>:<timestamp>",
        wait briefly, re-read, and confirm ownership. Locks older than 300
        seconds are treated as stale and may be overwritten.

        Raises:
            LockError: If the lock cannot be acquired within `timeout` seconds.
            StorageError: On unexpected GCS errors.
        """
        start_time = time.time()
        lock_key = self._key(f".locks/{lock_name}.lock")
        blob = self.bucket.blob(lock_key)

        while True:
            try:
                # If a fresh (non-stale) lock exists, wait for it.
                if blob.exists():
                    blob.reload()
                    existing = blob.download_as_bytes().decode()
                    parts = existing.split(':')
                    if len(parts) == 2:
                        _, ts = parts
                        if int(time.time()) - int(ts) < 300:  # Lock is fresh
                            if time.time() - start_time >= timeout:
                                raise LockError(
                                    f"Could not acquire lock '{lock_name}' within {timeout}s"
                                )
                            time.sleep(0.5)
                            continue

                # Lock absent or stale: create/overwrite it.
                lock_data = f"{self._lock_id}:{int(time.time())}"
                blob.upload_from_string(lock_data)

                # Re-read to verify we won any concurrent write race.
                time.sleep(0.1)
                blob.reload()
                content = blob.download_as_bytes().decode()
                if content.startswith(self._lock_id):
                    return True

                # Someone else got it first.
                if time.time() - start_time >= timeout:
                    raise LockError(f"Could not acquire lock '{lock_name}' within {timeout}s")
                time.sleep(0.5)

            except NotFound:
                # Lock vanished between exists() and download; try to create it.
                try:
                    lock_data = f"{self._lock_id}:{int(time.time())}"
                    blob.upload_from_string(lock_data)
                    return True
                except Exception:
                    if time.time() - start_time >= timeout:
                        raise LockError(f"Could not acquire lock '{lock_name}' within {timeout}s")
                    time.sleep(0.5)
            except LockError:
                # Bug fix: propagate the timeout as LockError instead of letting
                # the generic handler below rewrap it as StorageError.
                raise
            except Exception as e:
                raise StorageError(f"Error acquiring lock: {e}") from e

    def release_lock(self, lock_name: str) -> None:
        """Release a distributed lock (only if this instance owns it; best-effort)."""
        lock_key = self._key(f".locks/{lock_name}.lock")
        blob = self.bucket.blob(lock_key)

        try:
            # Only delete if the lock content still carries our owner id.
            if blob.exists():
                content = blob.download_as_bytes().decode()
                if content.startswith(self._lock_id):
                    blob.delete()
        except Exception:
            pass  # Ignore errors on release

    def is_locked(self, lock_name: str) -> bool:
        """Check if a lock is currently held (exists and is under 5 minutes old)."""
        lock_key = self._key(f".locks/{lock_name}.lock")
        blob = self.bucket.blob(lock_key)

        try:
            if not blob.exists():
                return False

            content = blob.download_as_bytes().decode()
            parts = content.split(':')
            if len(parts) == 2:
                _, ts = parts
                # Lock is valid if less than 5 minutes old
                return int(time.time()) - int(ts) < 300
            return False
        except Exception:
            return False
@@ -0,0 +1,182 @@
1
+ """
2
+ Local filesystem storage adapter for agmem.
3
+ """
4
+
5
+ import os
6
+ import time
7
+ import fcntl
8
+ from pathlib import Path
9
+ from typing import List, Optional
10
+ from datetime import datetime
11
+
12
+ from .base import StorageAdapter, StorageError, LockError, FileInfo
13
+
14
+
15
class LocalStorageAdapter(StorageAdapter):
    """Storage adapter for the local filesystem.

    All operations are confined to a root directory. Locking uses POSIX
    advisory locks (fcntl.flock) on files under <root>/.locks/, so this
    adapter is POSIX-only.
    """

    def __init__(self, root_path: str):
        """
        Initialize local storage adapter.

        Args:
            root_path: Root directory for storage
        """
        self.root = Path(root_path).resolve()
        self._locks: dict = {}  # lock_name -> open handle that keeps the flock alive

    def _resolve_path(self, path: str) -> Path:
        """Resolve a relative path to an absolute path within root.

        Raises:
            StorageError: If the resolved path escapes the storage root
                (e.g. via '..' components or symlinks).
        """
        if not path:
            return self.root
        resolved = (self.root / path).resolve()
        # Security check by path components, not raw string prefixes: a naive
        # startswith() would wrongly accept "/data2/x" for root "/data".
        if resolved != self.root and self.root not in resolved.parents:
            raise StorageError(f"Path '{path}' is outside storage root")
        return resolved

    def read_file(self, path: str) -> bytes:
        """Read a file's contents.

        Raises:
            StorageError: If the file is missing or unreadable.
        """
        resolved = self._resolve_path(path)
        try:
            return resolved.read_bytes()
        except FileNotFoundError:
            raise StorageError(f"File not found: {path}") from None
        except IOError as e:
            raise StorageError(f"Error reading file {path}: {e}") from e

    def write_file(self, path: str, data: bytes) -> None:
        """Write data to a file, creating parent directories as needed.

        Raises:
            StorageError: If the write fails.
        """
        resolved = self._resolve_path(path)
        try:
            resolved.parent.mkdir(parents=True, exist_ok=True)
            resolved.write_bytes(data)
        except IOError as e:
            raise StorageError(f"Error writing file {path}: {e}") from e

    def exists(self, path: str) -> bool:
        """Check if a path exists."""
        return self._resolve_path(path).exists()

    def delete(self, path: str) -> bool:
        """Delete a file or empty directory.

        Returns:
            True if something was deleted, False if the path did not exist.

        Raises:
            StorageError: On I/O errors (including deleting a non-empty dir).
        """
        resolved = self._resolve_path(path)
        try:
            if resolved.exists():
                if resolved.is_dir():
                    resolved.rmdir()  # Only removes empty directories
                else:
                    resolved.unlink()
                return True
            return False
        except IOError as e:
            raise StorageError(f"Error deleting {path}: {e}") from e

    def list_dir(self, path: str = "") -> List[FileInfo]:
        """List contents of a directory (empty list if it does not exist).

        Raises:
            StorageError: If the path exists but is not a directory.
        """
        resolved = self._resolve_path(path)

        if not resolved.exists():
            return []

        if not resolved.is_dir():
            raise StorageError(f"Not a directory: {path}")

        result = []
        for item in resolved.iterdir():
            try:
                stat = item.stat()
                rel_path = str(item.relative_to(self.root))

                result.append(FileInfo(
                    path=rel_path,
                    size=stat.st_size if not item.is_dir() else 0,
                    modified=datetime.fromtimestamp(stat.st_mtime).isoformat(),
                    is_dir=item.is_dir()
                ))
            except IOError:
                # Skip entries we can't stat (permission issues, races)
                continue

        return result

    def makedirs(self, path: str) -> None:
        """Create directory and any necessary parent directories."""
        resolved = self._resolve_path(path)
        resolved.mkdir(parents=True, exist_ok=True)

    def is_dir(self, path: str) -> bool:
        """Check if path is a directory."""
        return self._resolve_path(path).is_dir()

    def acquire_lock(self, lock_name: str, timeout: int = 30) -> bool:
        """
        Acquire a file-based lock (fcntl.flock, POSIX only).

        The open handle is kept in self._locks to hold the lock until
        release_lock() is called.

        Raises:
            LockError: If the lock cannot be acquired within `timeout` seconds.
        """
        lock_path = self.root / '.locks' / f'{lock_name}.lock'
        lock_path.parent.mkdir(parents=True, exist_ok=True)

        start_time = time.time()

        while True:
            # Open in append mode so a contended lock file is NOT truncated
            # before we actually own the lock.
            lock_file = open(lock_path, 'a')
            try:
                # Non-blocking exclusive lock; raises while another process holds it.
                fcntl.flock(lock_file.fileno(), fcntl.LOCK_EX | fcntl.LOCK_NB)
            except (IOError, OSError):
                # Bug fix: close the handle on each failed attempt — the
                # original leaked one file descriptor per retry.
                lock_file.close()
                if time.time() - start_time >= timeout:
                    raise LockError(f"Could not acquire lock '{lock_name}' within {timeout}s")
                time.sleep(0.1)
                continue

            # We own the lock: now it is safe to replace the contents with our
            # PID (diagnostic only). Keep the handle open to hold the lock.
            lock_file.truncate(0)
            lock_file.write(str(os.getpid()))
            lock_file.flush()
            self._locks[lock_name] = lock_file
            return True

    def release_lock(self, lock_name: str) -> None:
        """Release a file-based lock acquired by this instance (best-effort)."""
        if lock_name in self._locks:
            lock_file = self._locks.pop(lock_name)
            try:
                fcntl.flock(lock_file.fileno(), fcntl.LOCK_UN)
                lock_file.close()
            except (IOError, OSError):
                pass

            # Best-effort cleanup of the lock file itself.
            lock_path = self.root / '.locks' / f'{lock_name}.lock'
            try:
                lock_path.unlink()
            except (IOError, OSError):
                pass

    def is_locked(self, lock_name: str) -> bool:
        """Check if a lock is currently held by probing with a non-blocking flock."""
        lock_path = self.root / '.locks' / f'{lock_name}.lock'

        if not lock_path.exists():
            return False

        try:
            # Bug fix: open in append mode — the original used 'w', which
            # truncated the holder's PID record merely by checking the lock.
            with open(lock_path, 'a') as f:
                fcntl.flock(f.fileno(), fcntl.LOCK_EX | fcntl.LOCK_NB)
                fcntl.flock(f.fileno(), fcntl.LOCK_UN)
            return False  # We could briefly take the lock, so it's free
        except (IOError, OSError):
            return True  # flock refused: another holder has it

    def get_root(self) -> Path:
        """Get the root path of this storage."""
        return self.root