agmem 0.1.1__py3-none-any.whl → 0.1.2__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as published to their public registry. It is provided for informational purposes only and reflects the changes between those package versions.
Files changed (80)
  1. {agmem-0.1.1.dist-info → agmem-0.1.2.dist-info}/METADATA +20 -3
  2. agmem-0.1.2.dist-info/RECORD +86 -0
  3. memvcs/__init__.py +1 -1
  4. memvcs/cli.py +35 -31
  5. memvcs/commands/__init__.py +9 -9
  6. memvcs/commands/add.py +77 -76
  7. memvcs/commands/blame.py +46 -53
  8. memvcs/commands/branch.py +13 -33
  9. memvcs/commands/checkout.py +27 -32
  10. memvcs/commands/clean.py +18 -23
  11. memvcs/commands/clone.py +4 -1
  12. memvcs/commands/commit.py +40 -39
  13. memvcs/commands/daemon.py +81 -76
  14. memvcs/commands/decay.py +77 -0
  15. memvcs/commands/diff.py +56 -57
  16. memvcs/commands/distill.py +74 -0
  17. memvcs/commands/fsck.py +55 -61
  18. memvcs/commands/garden.py +28 -37
  19. memvcs/commands/graph.py +41 -48
  20. memvcs/commands/init.py +16 -24
  21. memvcs/commands/log.py +25 -40
  22. memvcs/commands/merge.py +16 -28
  23. memvcs/commands/pack.py +129 -0
  24. memvcs/commands/pull.py +4 -1
  25. memvcs/commands/push.py +4 -2
  26. memvcs/commands/recall.py +145 -0
  27. memvcs/commands/reflog.py +13 -22
  28. memvcs/commands/remote.py +1 -0
  29. memvcs/commands/repair.py +66 -0
  30. memvcs/commands/reset.py +23 -33
  31. memvcs/commands/resurrect.py +82 -0
  32. memvcs/commands/search.py +3 -4
  33. memvcs/commands/serve.py +2 -1
  34. memvcs/commands/show.py +66 -36
  35. memvcs/commands/stash.py +34 -34
  36. memvcs/commands/status.py +27 -35
  37. memvcs/commands/tag.py +23 -47
  38. memvcs/commands/test.py +30 -44
  39. memvcs/commands/timeline.py +111 -0
  40. memvcs/commands/tree.py +26 -27
  41. memvcs/commands/verify.py +59 -0
  42. memvcs/commands/when.py +115 -0
  43. memvcs/core/access_index.py +167 -0
  44. memvcs/core/config_loader.py +3 -1
  45. memvcs/core/consistency.py +214 -0
  46. memvcs/core/decay.py +185 -0
  47. memvcs/core/diff.py +158 -143
  48. memvcs/core/distiller.py +277 -0
  49. memvcs/core/gardener.py +164 -132
  50. memvcs/core/hooks.py +48 -14
  51. memvcs/core/knowledge_graph.py +134 -138
  52. memvcs/core/merge.py +248 -171
  53. memvcs/core/objects.py +95 -96
  54. memvcs/core/pii_scanner.py +147 -146
  55. memvcs/core/refs.py +132 -115
  56. memvcs/core/repository.py +174 -164
  57. memvcs/core/schema.py +155 -113
  58. memvcs/core/staging.py +60 -65
  59. memvcs/core/storage/__init__.py +20 -18
  60. memvcs/core/storage/base.py +74 -70
  61. memvcs/core/storage/gcs.py +70 -68
  62. memvcs/core/storage/local.py +42 -40
  63. memvcs/core/storage/s3.py +105 -110
  64. memvcs/core/temporal_index.py +112 -0
  65. memvcs/core/test_runner.py +101 -93
  66. memvcs/core/vector_store.py +41 -35
  67. memvcs/integrations/mcp_server.py +1 -3
  68. memvcs/integrations/web_ui/server.py +25 -26
  69. memvcs/retrieval/__init__.py +22 -0
  70. memvcs/retrieval/base.py +54 -0
  71. memvcs/retrieval/pack.py +128 -0
  72. memvcs/retrieval/recaller.py +105 -0
  73. memvcs/retrieval/strategies.py +314 -0
  74. memvcs/utils/__init__.py +3 -3
  75. memvcs/utils/helpers.py +52 -52
  76. agmem-0.1.1.dist-info/RECORD +0 -67
  77. {agmem-0.1.1.dist-info → agmem-0.1.2.dist-info}/WHEEL +0 -0
  78. {agmem-0.1.1.dist-info → agmem-0.1.2.dist-info}/entry_points.txt +0 -0
  79. {agmem-0.1.1.dist-info → agmem-0.1.2.dist-info}/licenses/LICENSE +0 -0
  80. {agmem-0.1.1.dist-info → agmem-0.1.2.dist-info}/top_level.txt +0 -0
memvcs/core/objects.py CHANGED
@@ -18,21 +18,21 @@ def _valid_object_hash(hash_id: str) -> bool:
     """Return True if hash_id is safe for object paths (hex, 4-64 chars)."""
     if not hash_id or len(hash_id) < 4 or len(hash_id) > 64:
         return False
-    return all(c in '0123456789abcdef' for c in hash_id.lower())
+    return all(c in "0123456789abcdef" for c in hash_id.lower())


 class ObjectStore:
     """Content-addressable object storage system."""
-
+
     def __init__(self, objects_dir: Path):
         self.objects_dir = Path(objects_dir)
         self._ensure_directories()
-
+
     def _ensure_directories(self):
         """Create object storage directories."""
-        for obj_type in ['blob', 'tree', 'commit', 'tag']:
+        for obj_type in ["blob", "tree", "commit", "tag"]:
             (self.objects_dir / obj_type).mkdir(parents=True, exist_ok=True)
-
+
     def _get_object_path(self, hash_id: str, obj_type: str) -> Path:
         """Get storage path for an object. Validates hash_id to prevent path traversal."""
         if not _valid_object_hash(hash_id):
@@ -40,76 +40,76 @@ class ObjectStore:
         prefix = hash_id[:2]
         suffix = hash_id[2:]
         return self.objects_dir / obj_type / prefix / suffix
-
+
     def _compute_hash(self, content: bytes, obj_type: str) -> str:
         """Compute SHA-256 hash of content with type header."""
         header = f"{obj_type} {len(content)}\0".encode()
         full_content = header + content
         return hashlib.sha256(full_content).hexdigest()
-
+
     def store(self, content: bytes, obj_type: str) -> str:
         """
         Store content and return its hash ID.
-
+
         Args:
             content: Raw bytes to store
             obj_type: Type of object ('blob', 'tree', 'commit', 'tag')
-
+
         Returns:
             SHA-256 hash ID of stored object
         """
         hash_id = self._compute_hash(content, obj_type)
         obj_path = self._get_object_path(hash_id, obj_type)
-
+
         # Don't store if already exists (deduplication)
         if obj_path.exists():
             return hash_id
-
+
         # Create directory if needed
         obj_path.parent.mkdir(parents=True, exist_ok=True)
-
+
         # Compress and store
         header = f"{obj_type} {len(content)}\0".encode()
         full_content = header + content
         compressed = zlib.compress(full_content)
-
+
         obj_path.write_bytes(compressed)
         return hash_id
-
+
     def retrieve(self, hash_id: str, obj_type: str) -> Optional[bytes]:
         """
         Retrieve content by hash ID.
-
+
         Args:
             hash_id: SHA-256 hash of the object
             obj_type: Type of object
-
+
         Returns:
             Raw bytes content or None if not found
         """
         obj_path = self._get_object_path(hash_id, obj_type)
-
+
         if not obj_path.exists():
             return None
-
+
         # Decompress and extract content
         compressed = obj_path.read_bytes()
         full_content = zlib.decompress(compressed)
-
+
         # Parse header
-        null_idx = full_content.index(b'\0')
+        null_idx = full_content.index(b"\0")
         header = full_content[:null_idx].decode()
-        content = full_content[null_idx + 1:]
-
+        content = full_content[null_idx + 1 :]
+
         return content
-
+
     def exists(self, hash_id: str, obj_type: str) -> bool:
         """Check if an object exists. Returns False for invalid hash (no raise)."""
         if not _valid_object_hash(hash_id):
             return False
         obj_path = self._get_object_path(hash_id, obj_type)
         return obj_path.exists()
-
+
     def delete(self, hash_id: str, obj_type: str) -> bool:
         """Delete an object. Returns True if deleted, False if not found."""
         obj_path = self._get_object_path(hash_id, obj_type)
@@ -120,13 +120,13 @@ class ObjectStore:
                 obj_path.parent.rmdir()
             return True
         return False
-
+
     def list_objects(self, obj_type: str) -> List[str]:
         """List all objects of a given type."""
         obj_dir = self.objects_dir / obj_type
         if not obj_dir.exists():
             return []
-
+
         hashes = []
         for prefix_dir in obj_dir.iterdir():
             if prefix_dir.is_dir():
@@ -134,7 +134,7 @@ class ObjectStore:
                 hash_id = prefix_dir.name + suffix_file.name
                 hashes.append(hash_id)
         return hashes
-
+
     def get_size(self, hash_id: str, obj_type: str) -> int:
         """Get the compressed size of an object."""
         obj_path = self._get_object_path(hash_id, obj_type)
@@ -146,16 +146,17 @@ class ObjectStore:
 @dataclass
 class Blob:
     """Blob object for storing raw memory content."""
+
     content: bytes
-
+
     def store(self, store: ObjectStore) -> str:
         """Store this blob and return its hash."""
-        return store.store(self.content, 'blob')
-
+        return store.store(self.content, "blob")
+
     @staticmethod
-    def load(store: ObjectStore, hash_id: str) -> Optional['Blob']:
+    def load(store: ObjectStore, hash_id: str) -> Optional["Blob"]:
         """Load a blob from storage."""
-        content = store.retrieve(hash_id, 'blob')
+        content = store.retrieve(hash_id, "blob")
         if content is not None:
             return Blob(content=content)
         return None
@@ -164,6 +165,7 @@ class Blob:
 @dataclass
 class TreeEntry:
     """Entry in a tree object."""
+
     mode: str  # '100644' for file, '040000' for directory
     obj_type: str  # 'blob' or 'tree'
     hash: str
@@ -174,52 +176,47 @@ class TreeEntry:
 @dataclass
 class Tree:
     """Tree object for storing directory structure."""
+
     entries: List[TreeEntry]
-
+
     def to_dict(self) -> Dict[str, Any]:
         """Convert to dictionary for serialization."""
         return {
-            'type': 'tree',
-            'entries': [
-                {
-                    'mode': e.mode,
-                    'type': e.obj_type,
-                    'hash': e.hash,
-                    'name': e.name,
-                    'path': e.path
-                }
+            "type": "tree",
+            "entries": [
+                {"mode": e.mode, "type": e.obj_type, "hash": e.hash, "name": e.name, "path": e.path}
                 for e in self.entries
-            ]
+            ],
         }
-
+
     def to_bytes(self) -> bytes:
         """Serialize to bytes."""
         return json.dumps(self.to_dict(), sort_keys=True).encode()
-
+
     def store(self, store: ObjectStore) -> str:
         """Store this tree and return its hash."""
-        return store.store(self.to_bytes(), 'tree')
-
+        return store.store(self.to_bytes(), "tree")
+
     @staticmethod
-    def load(store: ObjectStore, hash_id: str) -> Optional['Tree']:
+    def load(store: ObjectStore, hash_id: str) -> Optional["Tree"]:
         """Load a tree from storage."""
-        content = store.retrieve(hash_id, 'tree')
+        content = store.retrieve(hash_id, "tree")
         if content is None:
             return None
-
+
         data = json.loads(content)
         entries = [
             TreeEntry(
-                mode=e['mode'],
-                obj_type=e['type'],
-                hash=e['hash'],
-                name=e['name'],
-                path=e.get('path', '')
+                mode=e["mode"],
+                obj_type=e["type"],
+                hash=e["hash"],
+                name=e["name"],
+                path=e.get("path", ""),
             )
-            for e in data.get('entries', [])
+            for e in data.get("entries", [])
         ]
         return Tree(entries=entries)
-
+
     def get_entry(self, name: str) -> Optional[TreeEntry]:
         """Get an entry by name."""
         for entry in self.entries:
@@ -231,50 +228,51 @@ class Tree:
 @dataclass
 class Commit:
     """Commit object for storing memory snapshots."""
+
     tree: str  # Hash of tree object
     parents: List[str]  # Hashes of parent commits
     author: str
     timestamp: str
     message: str
     metadata: Dict[str, Any]  # Additional metadata
-
+
     def to_dict(self) -> Dict[str, Any]:
         """Convert to dictionary for serialization."""
         return {
-            'type': 'commit',
-            'tree': self.tree,
-            'parents': self.parents,
-            'author': self.author,
-            'timestamp': self.timestamp,
-            'message': self.message,
-            'metadata': self.metadata
+            "type": "commit",
+            "tree": self.tree,
+            "parents": self.parents,
+            "author": self.author,
+            "timestamp": self.timestamp,
+            "message": self.message,
+            "metadata": self.metadata,
         }
-
+
     def to_bytes(self) -> bytes:
         """Serialize to bytes."""
         return json.dumps(self.to_dict(), sort_keys=True).encode()
-
+
     def store(self, store: ObjectStore) -> str:
         """Store this commit and return its hash."""
-        return store.store(self.to_bytes(), 'commit')
-
+        return store.store(self.to_bytes(), "commit")
+
     @staticmethod
-    def load(store: ObjectStore, hash_id: str) -> Optional['Commit']:
+    def load(store: ObjectStore, hash_id: str) -> Optional["Commit"]:
         """Load a commit from storage."""
-        content = store.retrieve(hash_id, 'commit')
+        content = store.retrieve(hash_id, "commit")
         if content is None:
             return None
-
+
         data = json.loads(content)
         return Commit(
-            tree=data['tree'],
-            parents=data.get('parents', []),
-            author=data['author'],
-            timestamp=data['timestamp'],
-            message=data['message'],
-            metadata=data.get('metadata', {})
+            tree=data["tree"],
+            parents=data.get("parents", []),
+            author=data["author"],
+            timestamp=data["timestamp"],
+            message=data["message"],
+            metadata=data.get("metadata", {}),
         )
-
+
     def short_hash(self, store: ObjectStore) -> str:
         """Get short hash for display."""
         full_hash = self.store(store)
@@ -284,40 +282,41 @@ class Commit:
 @dataclass
 class Tag:
     """Tag object for marking specific commits."""
+
     name: str
     commit_hash: str
     message: str
     timestamp: str
-
+
     def to_dict(self) -> Dict[str, Any]:
         """Convert to dictionary for serialization."""
         return {
-            'type': 'tag',
-            'name': self.name,
-            'commit_hash': self.commit_hash,
-            'message': self.message,
-            'timestamp': self.timestamp
+            "type": "tag",
+            "name": self.name,
+            "commit_hash": self.commit_hash,
+            "message": self.message,
+            "timestamp": self.timestamp,
         }
-
+
     def to_bytes(self) -> bytes:
         """Serialize to bytes."""
         return json.dumps(self.to_dict(), sort_keys=True).encode()
-
+
     def store(self, store: ObjectStore) -> str:
         """Store this tag and return its hash."""
-        return store.store(self.to_bytes(), 'tag')
-
+        return store.store(self.to_bytes(), "tag")
+
     @staticmethod
-    def load(store: ObjectStore, hash_id: str) -> Optional['Tag']:
+    def load(store: ObjectStore, hash_id: str) -> Optional["Tag"]:
         """Load a tag from storage."""
-        content = store.retrieve(hash_id, 'tag')
+        content = store.retrieve(hash_id, "tag")
         if content is None:
             return None
-
+
         data = json.loads(content)
         return Tag(
-            name=data['name'],
-            commit_hash=data['commit_hash'],
-            message=data['message'],
-            timestamp=data['timestamp']
+            name=data["name"],
+            commit_hash=data["commit_hash"],
+            message=data["message"],
+            timestamp=data["timestamp"],
        )
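
Every visible change in this file is cosmetic (single quotes to double quotes, trailing commas, trailing-whitespace cleanup, blank lines after dataclass docstrings), consistent with an auto-formatter such as Black being applied; the object model itself is unchanged. For orientation, a minimal usage sketch of the API as it appears in this diff follows. The objects directory path and the sample content are illustrative assumptions, not defaults documented by agmem.

from pathlib import Path

from memvcs.core.objects import Blob, ObjectStore

# Hypothetical objects directory chosen for this sketch; agmem's real repository
# layout may differ.
store = ObjectStore(Path("/tmp/agmem-demo/objects"))

# store() hashes "<type> <len>\0" + content with SHA-256, zlib-compresses it,
# and writes it under objects/<type>/<hash[:2]>/<hash[2:]>, deduplicating on hash.
blob_hash = Blob(content=b"user prefers concise answers").store(store)

# load()/retrieve() reverse the process; identical content yields the same hash.
loaded = Blob.load(store, blob_hash)
assert loaded is not None and loaded.content == b"user prefers concise answers"
assert store.exists(blob_hash, "blob")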