agmem 0.1.1__py3-none-any.whl → 0.1.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (80)
  1. {agmem-0.1.1.dist-info → agmem-0.1.2.dist-info}/METADATA +20 -3
  2. agmem-0.1.2.dist-info/RECORD +86 -0
  3. memvcs/__init__.py +1 -1
  4. memvcs/cli.py +35 -31
  5. memvcs/commands/__init__.py +9 -9
  6. memvcs/commands/add.py +77 -76
  7. memvcs/commands/blame.py +46 -53
  8. memvcs/commands/branch.py +13 -33
  9. memvcs/commands/checkout.py +27 -32
  10. memvcs/commands/clean.py +18 -23
  11. memvcs/commands/clone.py +4 -1
  12. memvcs/commands/commit.py +40 -39
  13. memvcs/commands/daemon.py +81 -76
  14. memvcs/commands/decay.py +77 -0
  15. memvcs/commands/diff.py +56 -57
  16. memvcs/commands/distill.py +74 -0
  17. memvcs/commands/fsck.py +55 -61
  18. memvcs/commands/garden.py +28 -37
  19. memvcs/commands/graph.py +41 -48
  20. memvcs/commands/init.py +16 -24
  21. memvcs/commands/log.py +25 -40
  22. memvcs/commands/merge.py +16 -28
  23. memvcs/commands/pack.py +129 -0
  24. memvcs/commands/pull.py +4 -1
  25. memvcs/commands/push.py +4 -2
  26. memvcs/commands/recall.py +145 -0
  27. memvcs/commands/reflog.py +13 -22
  28. memvcs/commands/remote.py +1 -0
  29. memvcs/commands/repair.py +66 -0
  30. memvcs/commands/reset.py +23 -33
  31. memvcs/commands/resurrect.py +82 -0
  32. memvcs/commands/search.py +3 -4
  33. memvcs/commands/serve.py +2 -1
  34. memvcs/commands/show.py +66 -36
  35. memvcs/commands/stash.py +34 -34
  36. memvcs/commands/status.py +27 -35
  37. memvcs/commands/tag.py +23 -47
  38. memvcs/commands/test.py +30 -44
  39. memvcs/commands/timeline.py +111 -0
  40. memvcs/commands/tree.py +26 -27
  41. memvcs/commands/verify.py +59 -0
  42. memvcs/commands/when.py +115 -0
  43. memvcs/core/access_index.py +167 -0
  44. memvcs/core/config_loader.py +3 -1
  45. memvcs/core/consistency.py +214 -0
  46. memvcs/core/decay.py +185 -0
  47. memvcs/core/diff.py +158 -143
  48. memvcs/core/distiller.py +277 -0
  49. memvcs/core/gardener.py +164 -132
  50. memvcs/core/hooks.py +48 -14
  51. memvcs/core/knowledge_graph.py +134 -138
  52. memvcs/core/merge.py +248 -171
  53. memvcs/core/objects.py +95 -96
  54. memvcs/core/pii_scanner.py +147 -146
  55. memvcs/core/refs.py +132 -115
  56. memvcs/core/repository.py +174 -164
  57. memvcs/core/schema.py +155 -113
  58. memvcs/core/staging.py +60 -65
  59. memvcs/core/storage/__init__.py +20 -18
  60. memvcs/core/storage/base.py +74 -70
  61. memvcs/core/storage/gcs.py +70 -68
  62. memvcs/core/storage/local.py +42 -40
  63. memvcs/core/storage/s3.py +105 -110
  64. memvcs/core/temporal_index.py +112 -0
  65. memvcs/core/test_runner.py +101 -93
  66. memvcs/core/vector_store.py +41 -35
  67. memvcs/integrations/mcp_server.py +1 -3
  68. memvcs/integrations/web_ui/server.py +25 -26
  69. memvcs/retrieval/__init__.py +22 -0
  70. memvcs/retrieval/base.py +54 -0
  71. memvcs/retrieval/pack.py +128 -0
  72. memvcs/retrieval/recaller.py +105 -0
  73. memvcs/retrieval/strategies.py +314 -0
  74. memvcs/utils/__init__.py +3 -3
  75. memvcs/utils/helpers.py +52 -52
  76. agmem-0.1.1.dist-info/RECORD +0 -67
  77. {agmem-0.1.1.dist-info → agmem-0.1.2.dist-info}/WHEEL +0 -0
  78. {agmem-0.1.1.dist-info → agmem-0.1.2.dist-info}/entry_points.txt +0 -0
  79. {agmem-0.1.1.dist-info → agmem-0.1.2.dist-info}/licenses/LICENSE +0 -0
  80. {agmem-0.1.1.dist-info → agmem-0.1.2.dist-info}/top_level.txt +0 -0
memvcs/core/staging.py CHANGED
@@ -15,6 +15,7 @@ from dataclasses import dataclass, asdict
15
15
  @dataclass
16
16
  class StagedFile:
17
17
  """Represents a file in the staging area."""
18
+
18
19
  path: str # Relative path from current/
19
20
  blob_hash: str
20
21
  mode: int = 0o100644 # Regular file
@@ -35,14 +36,14 @@ def _path_under_root(relative_path: str, root: Path) -> Optional[Path]:
35
36
 
36
37
  class StagingArea:
37
38
  """Manages the staging area for memory commits."""
38
-
39
+
39
40
  def __init__(self, mem_dir: Path):
40
41
  self.mem_dir = Path(mem_dir)
41
- self.staging_dir = self.mem_dir / 'staging'
42
- self.index_file = self.mem_dir / 'index.json'
42
+ self.staging_dir = self.mem_dir / "staging"
43
+ self.index_file = self.mem_dir / "index.json"
43
44
  self._index: Dict[str, StagedFile] = {}
44
45
  self._load_index()
45
-
46
+
46
47
  def _load_index(self):
47
48
  """Load the staging index from disk."""
48
49
  if self.index_file.exists():
@@ -52,72 +53,62 @@ class StagingArea:
52
53
  if _path_under_root(path, self.staging_dir) is None:
53
54
  continue
54
55
  self._index[path] = StagedFile(
55
- path=path,
56
- blob_hash=info['blob_hash'],
57
- mode=info.get('mode', 0o100644)
56
+ path=path, blob_hash=info["blob_hash"], mode=info.get("mode", 0o100644)
58
57
  )
59
58
  except (json.JSONDecodeError, KeyError):
60
59
  self._index = {}
61
-
60
+
62
61
  def _save_index(self):
63
62
  """Save the staging index to disk."""
64
63
  data = {
65
- path: {
66
- 'blob_hash': sf.blob_hash,
67
- 'mode': sf.mode
68
- }
69
- for path, sf in self._index.items()
64
+ path: {"blob_hash": sf.blob_hash, "mode": sf.mode} for path, sf in self._index.items()
70
65
  }
71
66
  self.index_file.write_text(json.dumps(data, indent=2))
72
-
67
+
73
68
  def add(self, filepath: str, blob_hash: str, content: bytes, mode: int = 0o100644):
74
69
  """
75
70
  Add a file to the staging area.
76
-
71
+
77
72
  Args:
78
73
  filepath: Relative path from current/
79
74
  blob_hash: Hash of the blob object
80
75
  content: File content bytes
81
76
  mode: File mode (default 0o100644 for regular file)
82
-
77
+
83
78
  Raises:
84
79
  ValueError: If filepath escapes staging directory (path traversal)
85
80
  """
86
81
  staging_path = _path_under_root(filepath, self.staging_dir)
87
82
  if staging_path is None:
88
83
  raise ValueError(f"Path escapes staging area: {filepath}")
89
-
90
- self._index[filepath] = StagedFile(
91
- path=filepath,
92
- blob_hash=blob_hash,
93
- mode=mode
94
- )
95
-
84
+
85
+ self._index[filepath] = StagedFile(path=filepath, blob_hash=blob_hash, mode=mode)
86
+
96
87
  staging_path.parent.mkdir(parents=True, exist_ok=True)
97
88
  staging_path.write_bytes(content)
98
-
89
+
99
90
  self._save_index()
100
-
91
+
101
92
  def remove(self, filepath: str) -> bool:
102
93
  """
103
94
  Remove a file from the staging area.
104
-
95
+
105
96
  Returns:
106
97
  True if file was in staging, False otherwise
107
98
  """
108
99
  if filepath in self._index:
109
100
  del self._index[filepath]
110
-
101
+
111
102
  staging_path = _path_under_root(filepath, self.staging_dir)
112
103
  if staging_path is not None and staging_path.exists():
113
104
  staging_path.unlink()
114
105
  # Clean up empty directories
115
106
  self._cleanup_empty_dirs(staging_path.parent)
116
-
107
+
117
108
  self._save_index()
118
109
  return True
119
110
  return False
120
-
111
+
121
112
  def _cleanup_empty_dirs(self, dir_path: Path):
122
113
  """Remove empty directories up to staging root."""
123
114
  try:
@@ -129,99 +120,103 @@ class StagingArea:
129
120
  break
130
121
  except OSError:
131
122
  pass
132
-
123
+
133
124
  def get_staged_files(self) -> Dict[str, StagedFile]:
134
125
  """Get all staged files."""
135
126
  return dict(self._index)
136
-
127
+
137
128
  def is_staged(self, filepath: str) -> bool:
138
129
  """Check if a file is staged."""
139
130
  return filepath in self._index
140
-
131
+
141
132
  def get_blob_hash(self, filepath: str) -> Optional[str]:
142
133
  """Get the blob hash for a staged file."""
143
134
  if filepath in self._index:
144
135
  return self._index[filepath].blob_hash
145
136
  return None
146
-
137
+
147
138
  def clear(self):
148
139
  """Clear the entire staging area."""
149
140
  self._index = {}
150
-
141
+
151
142
  # Remove staging directory contents
152
143
  if self.staging_dir.exists():
153
144
  shutil.rmtree(self.staging_dir)
154
145
  self.staging_dir.mkdir(parents=True, exist_ok=True)
155
-
146
+
156
147
  # Remove index file
157
148
  if self.index_file.exists():
158
149
  self.index_file.unlink()
159
-
150
+
160
151
  def get_status(self) -> Dict[str, List[str]]:
161
152
  """
162
153
  Get staging status.
163
-
154
+
164
155
  Returns:
165
156
  Dict with 'staged', 'modified', 'deleted', 'untracked' lists
166
157
  """
167
158
  staged = list(self._index.keys())
168
-
159
+
169
160
  return {
170
- 'staged': staged,
171
- 'modified': [], # TODO: Compare with working directory
172
- 'deleted': [], # TODO: Check if files were deleted
173
- 'untracked': [] # TODO: Find untracked files
161
+ "staged": staged,
162
+ "modified": [], # TODO: Compare with working directory
163
+ "deleted": [], # TODO: Check if files were deleted
164
+ "untracked": [], # TODO: Find untracked files
174
165
  }
175
-
166
+
176
167
  def get_tree_entries(self) -> List[Dict]:
177
168
  """
178
169
  Get tree entries for creating a tree object.
179
-
170
+
180
171
  Returns:
181
172
  List of entry dictionaries for Tree creation
182
173
  """
183
174
  entries = []
184
175
  for path, sf in self._index.items():
185
- entries.append({
186
- 'mode': oct(sf.mode)[2:], # Convert to string like '100644'
187
- 'type': 'blob',
188
- 'hash': sf.blob_hash,
189
- 'name': Path(path).name,
190
- 'path': str(Path(path).parent) if str(Path(path).parent) != '.' else ''
191
- })
176
+ entries.append(
177
+ {
178
+ "mode": oct(sf.mode)[2:], # Convert to string like '100644'
179
+ "type": "blob",
180
+ "hash": sf.blob_hash,
181
+ "name": Path(path).name,
182
+ "path": str(Path(path).parent) if str(Path(path).parent) != "." else "",
183
+ }
184
+ )
192
185
  return entries
193
-
186
+
194
187
  def diff_with_head(self, repo) -> Dict[str, Dict]:
195
188
  """
196
189
  Compare staging area with HEAD commit.
197
-
190
+
198
191
  Returns:
199
192
  Dict mapping file paths to change info
200
193
  """
201
194
  changes = {}
202
-
195
+
203
196
  # Get HEAD tree
204
197
  head_commit = repo.get_head_commit()
205
198
  if head_commit:
206
- head_tree_bytes = repo.object_store.retrieve(head_commit.tree, 'tree')
199
+ head_tree_bytes = repo.object_store.retrieve(head_commit.tree, "tree")
207
200
  if head_tree_bytes:
208
- head_data = json.loads(head_tree_bytes.decode('utf-8'))
209
- head_entries = {e['path'] + '/' + e['name'] if e['path'] else e['name']: e
210
- for e in head_data.get('entries', [])}
201
+ head_data = json.loads(head_tree_bytes.decode("utf-8"))
202
+ head_entries = {
203
+ e["path"] + "/" + e["name"] if e["path"] else e["name"]: e
204
+ for e in head_data.get("entries", [])
205
+ }
211
206
  else:
212
207
  head_entries = {}
213
-
208
+
214
209
  # Compare with staging
215
210
  for path, sf in self._index.items():
216
211
  if path in head_entries:
217
- if head_entries[path]['hash'] != sf.blob_hash:
218
- changes[path] = {'status': 'modified', 'blob_hash': sf.blob_hash}
212
+ if head_entries[path]["hash"] != sf.blob_hash:
213
+ changes[path] = {"status": "modified", "blob_hash": sf.blob_hash}
219
214
  else:
220
- changes[path] = {'status': 'added', 'blob_hash': sf.blob_hash}
221
-
215
+ changes[path] = {"status": "added", "blob_hash": sf.blob_hash}
216
+
222
217
  # Check for deleted files
223
218
  for path in head_entries:
224
219
  if path not in self._index:
225
- changes[path] = {'status': 'deleted'}
226
-
220
+ changes[path] = {"status": "deleted"}
221
+
227
222
  return changes
@@ -10,22 +10,24 @@ from .base import StorageAdapter, StorageError, LockError
10
10
  from .local import LocalStorageAdapter
11
11
 
12
12
  __all__ = [
13
- 'StorageAdapter',
14
- 'StorageError',
15
- 'LockError',
16
- 'LocalStorageAdapter',
13
+ "StorageAdapter",
14
+ "StorageError",
15
+ "LockError",
16
+ "LocalStorageAdapter",
17
17
  ]
18
18
 
19
19
  # Try to import optional cloud adapters
20
20
  try:
21
21
  from .s3 import S3StorageAdapter
22
- __all__.append('S3StorageAdapter')
22
+
23
+ __all__.append("S3StorageAdapter")
23
24
  except ImportError:
24
25
  pass
25
26
 
26
27
  try:
27
28
  from .gcs import GCSStorageAdapter
28
- __all__.append('GCSStorageAdapter')
29
+
30
+ __all__.append("GCSStorageAdapter")
29
31
  except ImportError:
30
32
  pass
31
33
 
@@ -33,40 +35,40 @@ except ImportError:
33
35
  def get_adapter(url: str, config: Optional[dict] = None) -> StorageAdapter:
34
36
  """
35
37
  Get the appropriate storage adapter for a URL.
36
-
38
+
37
39
  Args:
38
40
  url: Storage URL (file://, s3://, gs://)
39
41
  config: Optional agmem config dict (from load_agmem_config). Used for
40
42
  S3/GCS credentials and options; credentials resolved from env only.
41
-
43
+
42
44
  Returns:
43
45
  Appropriate StorageAdapter instance
44
-
46
+
45
47
  Raises:
46
48
  ValueError: If URL scheme is not supported
47
49
  """
48
- if url.startswith('file://'):
50
+ if url.startswith("file://"):
49
51
  path = url[7:] # Remove 'file://' prefix
50
52
  return LocalStorageAdapter(path)
51
-
52
- elif url.startswith('s3://'):
53
+
54
+ elif url.startswith("s3://"):
53
55
  try:
54
56
  from .s3 import S3StorageAdapter
57
+
55
58
  return S3StorageAdapter.from_url(url, config=config)
56
59
  except ImportError:
57
- raise ImportError(
58
- "S3 storage requires boto3. Install with: pip install agmem[cloud]"
59
- )
60
-
61
- elif url.startswith('gs://'):
60
+ raise ImportError("S3 storage requires boto3. Install with: pip install agmem[cloud]")
61
+
62
+ elif url.startswith("gs://"):
62
63
  try:
63
64
  from .gcs import GCSStorageAdapter
65
+
64
66
  return GCSStorageAdapter.from_url(url, config=config)
65
67
  except ImportError:
66
68
  raise ImportError(
67
69
  "GCS storage requires google-cloud-storage. Install with: pip install agmem[cloud]"
68
70
  )
69
-
71
+
70
72
  else:
71
73
  # Assume local path
72
74
  return LocalStorageAdapter(url)