agmem 0.1.1__py3-none-any.whl → 0.1.3__py3-none-any.whl
This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
- {agmem-0.1.1.dist-info → agmem-0.1.3.dist-info}/METADATA +157 -16
- agmem-0.1.3.dist-info/RECORD +105 -0
- memvcs/__init__.py +1 -1
- memvcs/cli.py +45 -31
- memvcs/commands/__init__.py +9 -9
- memvcs/commands/add.py +83 -76
- memvcs/commands/audit.py +59 -0
- memvcs/commands/blame.py +46 -53
- memvcs/commands/branch.py +13 -33
- memvcs/commands/checkout.py +27 -32
- memvcs/commands/clean.py +18 -23
- memvcs/commands/clone.py +11 -1
- memvcs/commands/commit.py +40 -39
- memvcs/commands/daemon.py +109 -76
- memvcs/commands/decay.py +77 -0
- memvcs/commands/diff.py +56 -57
- memvcs/commands/distill.py +90 -0
- memvcs/commands/federated.py +53 -0
- memvcs/commands/fsck.py +86 -61
- memvcs/commands/garden.py +40 -35
- memvcs/commands/gc.py +51 -0
- memvcs/commands/graph.py +41 -48
- memvcs/commands/init.py +16 -24
- memvcs/commands/log.py +25 -40
- memvcs/commands/merge.py +69 -27
- memvcs/commands/pack.py +129 -0
- memvcs/commands/prove.py +66 -0
- memvcs/commands/pull.py +31 -1
- memvcs/commands/push.py +4 -2
- memvcs/commands/recall.py +145 -0
- memvcs/commands/reflog.py +13 -22
- memvcs/commands/remote.py +1 -0
- memvcs/commands/repair.py +66 -0
- memvcs/commands/reset.py +23 -33
- memvcs/commands/resolve.py +130 -0
- memvcs/commands/resurrect.py +82 -0
- memvcs/commands/search.py +3 -4
- memvcs/commands/serve.py +2 -1
- memvcs/commands/show.py +66 -36
- memvcs/commands/stash.py +34 -34
- memvcs/commands/status.py +27 -35
- memvcs/commands/tag.py +23 -47
- memvcs/commands/test.py +30 -44
- memvcs/commands/timeline.py +111 -0
- memvcs/commands/tree.py +26 -27
- memvcs/commands/verify.py +110 -0
- memvcs/commands/when.py +115 -0
- memvcs/core/access_index.py +167 -0
- memvcs/core/audit.py +124 -0
- memvcs/core/config_loader.py +3 -1
- memvcs/core/consistency.py +214 -0
- memvcs/core/crypto_verify.py +280 -0
- memvcs/core/decay.py +185 -0
- memvcs/core/diff.py +158 -143
- memvcs/core/distiller.py +277 -0
- memvcs/core/encryption.py +169 -0
- memvcs/core/federated.py +86 -0
- memvcs/core/gardener.py +176 -145
- memvcs/core/hooks.py +48 -14
- memvcs/core/ipfs_remote.py +39 -0
- memvcs/core/knowledge_graph.py +135 -138
- memvcs/core/llm/__init__.py +10 -0
- memvcs/core/llm/anthropic_provider.py +50 -0
- memvcs/core/llm/base.py +27 -0
- memvcs/core/llm/factory.py +30 -0
- memvcs/core/llm/openai_provider.py +36 -0
- memvcs/core/merge.py +260 -170
- memvcs/core/objects.py +110 -101
- memvcs/core/pack.py +92 -0
- memvcs/core/pii_scanner.py +147 -146
- memvcs/core/privacy_budget.py +63 -0
- memvcs/core/refs.py +132 -115
- memvcs/core/remote.py +38 -0
- memvcs/core/repository.py +254 -164
- memvcs/core/schema.py +155 -113
- memvcs/core/staging.py +60 -65
- memvcs/core/storage/__init__.py +20 -18
- memvcs/core/storage/base.py +74 -70
- memvcs/core/storage/gcs.py +70 -68
- memvcs/core/storage/local.py +42 -40
- memvcs/core/storage/s3.py +105 -110
- memvcs/core/temporal_index.py +121 -0
- memvcs/core/test_runner.py +101 -93
- memvcs/core/trust.py +103 -0
- memvcs/core/vector_store.py +56 -36
- memvcs/core/zk_proofs.py +26 -0
- memvcs/integrations/mcp_server.py +1 -3
- memvcs/integrations/web_ui/server.py +25 -26
- memvcs/retrieval/__init__.py +22 -0
- memvcs/retrieval/base.py +54 -0
- memvcs/retrieval/pack.py +128 -0
- memvcs/retrieval/recaller.py +105 -0
- memvcs/retrieval/strategies.py +314 -0
- memvcs/utils/__init__.py +3 -3
- memvcs/utils/helpers.py +52 -52
- agmem-0.1.1.dist-info/RECORD +0 -67
- {agmem-0.1.1.dist-info → agmem-0.1.3.dist-info}/WHEEL +0 -0
- {agmem-0.1.1.dist-info → agmem-0.1.3.dist-info}/entry_points.txt +0 -0
- {agmem-0.1.1.dist-info → agmem-0.1.3.dist-info}/licenses/LICENSE +0 -0
- {agmem-0.1.1.dist-info → agmem-0.1.3.dist-info}/top_level.txt +0 -0
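For orientation, a minimal sketch of how the updated Repository API in this release might be driven. This is an assumption-laden illustration, not part of the package: the import path is inferred from the memvcs/core/repository.py module listed above, and the directory and file names are invented for the example. Method signatures (init, stage_file, commit, resolve_ref) are taken from the diff below.

from pathlib import Path

from memvcs.core.repository import Repository  # import path assumed from the module layout above

# Initialize a new memory repository; per the diff below this creates .mem/, current/,
# the object store, staging and refs directories, and writes config.json.
repo = Repository.init(Path("./agent-memory"), author_name="Agent", author_email="agent@example.com")

# Write a file under current/, stage it, and commit it. In 0.1.3 commit() additionally
# records a Merkle root (and an Ed25519 signature when a signing key is available) in the metadata.
notes_dir = repo.current_dir / "notes"  # subdirectory name is illustrative
notes_dir.mkdir(parents=True, exist_ok=True)
(notes_dir / "example.md").write_text("# example memory\n")
repo.stage_file("notes/example.md")
commit_hash = repo.commit("Record example memory")

# resolve_ref() accepts branches, tags, HEAD, HEAD~n, commit hashes and, new in 0.1.3, ISO dates.
print(repo.resolve_ref("HEAD") == commit_hash)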
memvcs/core/repository.py
CHANGED
@@ -20,134 +20,159 @@ from .refs import RefsManager

 class Repository:
     """Main repository class coordinating all agmem operations."""
-
+
     def __init__(self, path: Path):
         self.root = Path(path).resolve()
-        self.mem_dir = self.root /
-        self.current_dir = self.root /
-        self.config_file = self.mem_dir /
-
+        self.mem_dir = self.root / ".mem"
+        self.current_dir = self.root / "current"
+        self.config_file = self.mem_dir / "config.json"
+
         self.object_store: Optional[ObjectStore] = None
         self.staging: Optional[StagingArea] = None
         self.refs: Optional[RefsManager] = None
-
+
         if self.is_valid_repo():
             self._init_components()
-
+
     def _init_components(self):
         """Initialize repository components."""
-
+        encryptor = None
+        try:
+            config = self.get_config()
+            if config.get("encryption", {}).get("enabled"):
+                from .encryption import (
+                    load_encryption_config,
+                    ObjectStoreEncryptor,
+                    get_key_from_env_or_cache,
+                )
+
+                if load_encryption_config(self.mem_dir):
+                    encryptor = ObjectStoreEncryptor(
+                        lambda: get_key_from_env_or_cache(self.mem_dir)
+                    )
+        except Exception:
+            pass
+        self.object_store = ObjectStore(self.mem_dir / "objects", encryptor=encryptor)
         self.staging = StagingArea(self.mem_dir)
         self.refs = RefsManager(self.mem_dir)
-
+
     @classmethod
-    def init(
+    def init(
+        cls, path: Path, author_name: str = "Agent", author_email: str = "agent@example.com"
+    ) -> "Repository":
         """
         Initialize a new repository.
-
+
         Args:
             path: Directory to initialize repository in
             author_name: Default author name
             author_email: Default author email
-
+
         Returns:
             Initialized Repository instance
         """
         repo = cls(path)
-
+
         if repo.is_valid_repo():
             raise ValueError(f"Repository already exists at {path}")
-
+
         # Create directory structure
         repo.mem_dir.mkdir(parents=True, exist_ok=True)
         repo.current_dir.mkdir(parents=True, exist_ok=True)
-
+
         for mem_type in MEMORY_TYPES:
             (repo.current_dir / mem_type).mkdir(parents=True, exist_ok=True)
-
+
         # Create object store directories
-        (repo.mem_dir /
-
+        (repo.mem_dir / "objects").mkdir(parents=True, exist_ok=True)
+
         # Create staging directory
-        (repo.mem_dir /
-
+        (repo.mem_dir / "staging").mkdir(parents=True, exist_ok=True)
+
         # Create refs directories
-        (repo.mem_dir /
-        (repo.mem_dir /
-
+        (repo.mem_dir / "refs" / "heads").mkdir(parents=True, exist_ok=True)
+        (repo.mem_dir / "refs" / "tags").mkdir(parents=True, exist_ok=True)
+
         # Create config
         config = {
-
-
-
-
-
-
-
-                'gc_prune_days': 90
+            "author": {"name": author_name, "email": author_email},
+            "core": {"default_branch": "main", "compression": True, "gc_prune_days": 90},
+            "memory": {
+                "auto_summarize": True,
+                "summarizer_model": "default",
+                "max_episode_size": 1024 * 1024,  # 1MB
+                "consolidation_threshold": 100,  # Episodes before consolidation
             },
-            'memory': {
-                'auto_summarize': True,
-                'summarizer_model': 'default',
-                'max_episode_size': 1024 * 1024,  # 1MB
-                'consolidation_threshold': 100  # Episodes before consolidation
-            }
         }
         repo.config_file.write_text(json.dumps(config, indent=2))
-
+
         # Initialize components
         repo._init_components()
-
+
         # Initialize HEAD
-        repo.refs.init_head(
-
+        repo.refs.init_head("main")
+
+        # Tamper-evident audit
+        try:
+            from .audit import append_audit
+
+            append_audit(repo.mem_dir, "init", {"author": author_name, "branch": "main"})
+        except Exception:
+            pass
+
         return repo
-
+
     def is_valid_repo(self) -> bool:
         """Check if this is a valid repository."""
         return (
-            self.mem_dir.exists()
-            self.config_file.exists()
-            (self.mem_dir /
+            self.mem_dir.exists()
+            and self.config_file.exists()
+            and (self.mem_dir / "objects").exists()
         )
-
+
     def get_config(self) -> Dict[str, Any]:
         """Get repository configuration."""
         if self.config_file.exists():
             return json.loads(self.config_file.read_text())
         return {}
-
+
     def set_config(self, config: Dict[str, Any]):
         """Set repository configuration."""
         self.config_file.write_text(json.dumps(config, indent=2))
-
+        try:
+            from .audit import append_audit
+
+            append_audit(self.mem_dir, "config_change", {})
+        except Exception:
+            pass
+
     def get_author(self) -> str:
         """Get the configured author string."""
         config = self.get_config()
-        author = config.get(
-        name = author.get(
-        email = author.get(
+        author = config.get("author", {})
+        name = author.get("name", "Agent")
+        email = author.get("email", "agent@example.com")
         return f"{name} <{email}>"

     def get_agmem_config(self) -> Dict[str, Any]:
         """Get merged agmem config (user + repo). Use for cloud and PII settings."""
         return load_agmem_config(self.root)
-
+
     def get_head_commit(self) -> Optional[Commit]:
         """Get the current HEAD commit object."""
         if not self.refs:
             return None
-
+
         head = self.refs.get_head()
-        if head[
-            commit_hash = self.refs.get_branch_commit(head[
+        if head["type"] == "branch":
+            commit_hash = self.refs.get_branch_commit(head["value"])
         else:
-            commit_hash = head[
-
+            commit_hash = head["value"]
+
         if commit_hash:
             return Commit.load(self.object_store, commit_hash)
         return None
-
+
     def get_commit_tree(self, commit_hash: str) -> Optional[Tree]:
         """Get the tree for a specific commit."""
         commit = Commit.load(self.object_store, commit_hash)
@@ -156,10 +181,22 @@ class Repository:
         return None

     def resolve_ref(self, ref: str) -> Optional[str]:
-        """Resolve a reference (branch, tag, HEAD, HEAD~n, commit hash) to a commit hash."""
+        """Resolve a reference (branch, tag, HEAD, HEAD~n, commit hash, or ISO date) to a commit hash."""
         if not self.refs:
             return None
-
+        resolved = self.refs.resolve_ref(ref, self.object_store)
+        if resolved:
+            return resolved
+        # Try temporal resolution for ISO date strings
+        if ref and (ref[0].isdigit() or ref.startswith("202")):
+            try:
+                from .temporal_index import TemporalIndex
+
+                ti = TemporalIndex(self.mem_dir, self.object_store)
+                return ti.resolve_at(ref)
+            except Exception:
+                pass
+        return None

     def _path_under_current_dir(self, relative_path: str) -> Optional[Path]:
         """Resolve path under current/; return None if it escapes (path traversal)."""
@@ -173,14 +210,14 @@ class Repository:
     def stage_file(self, filepath: str, content: Optional[bytes] = None) -> str:
         """
         Stage a file for commit.
-
+
         Args:
             filepath: Path relative to current/ directory
             content: File content (if None, reads from current/)
-
+
         Returns:
             Blob hash of staged content
-
+
         Raises:
             FileNotFoundError: If file does not exist
             ValueError: If filepath escapes current/ (path traversal)
@@ -192,29 +229,31 @@ class Repository:
             if not full_path.exists():
                 raise FileNotFoundError(f"File not found: {filepath}")
             content = full_path.read_bytes()
-
+
         # Store as blob
         blob = Blob(content=content)
         blob_hash = blob.store(self.object_store)
-
+
         # Add to staging area
         self.staging.add(filepath, blob_hash, content)
-
+
         return blob_hash
-
+
     def _build_tree_from_staged(self) -> str:
         """Build and store tree from staged files. Returns tree hash."""
         staged_files = self.staging.get_staged_files()
         entries = []
         for path, sf in staged_files.items():
             path_obj = Path(path)
-            entries.append(
-
-
-
-
-
-
+            entries.append(
+                TreeEntry(
+                    mode=oct(sf.mode)[2:],
+                    obj_type="blob",
+                    hash=sf.blob_hash,
+                    name=path_obj.name,
+                    path=str(path_obj.parent) if str(path_obj.parent) != "." else "",
+                )
+            )
         tree = Tree(entries=entries)
         return tree.store(self.object_store)

@@ -240,101 +279,133 @@ class Repository:
             filepath.parent.mkdir(parents=True, exist_ok=True)
             filepath.write_bytes(blob.content)

-    def stage_directory(self, dirpath: str =
+    def stage_directory(self, dirpath: str = "") -> Dict[str, str]:
         """
         Stage all files in a directory.
-
+
         Args:
             dirpath: Directory path relative to current/ (empty for all)
-
+
         Returns:
             Dict mapping file paths to blob hashes
         """
         target_dir = self.current_dir / dirpath if dirpath else self.current_dir
         staged = {}
-
+
         for root, dirs, files in os.walk(target_dir):
             # Skip hidden directories
-            dirs[:] = [d for d in dirs if not d.startswith(
-
+            dirs[:] = [d for d in dirs if not d.startswith(".")]
+
             for filename in files:
                 full_path = Path(root) / filename
                 rel_path = full_path.relative_to(self.current_dir)
-
+
                 content = full_path.read_bytes()
                 blob_hash = self.stage_file(str(rel_path), content)
                 staged[str(rel_path)] = blob_hash
-
+
         return staged
-
+
     def commit(self, message: str, metadata: Optional[Dict[str, Any]] = None) -> str:
         """
         Create a commit from staged changes.
-
+
         Args:
             message: Commit message
             metadata: Additional metadata
-
+
         Returns:
             Commit hash
         """
         staged_files = self.staging.get_staged_files()
-
+
         if not staged_files:
             raise ValueError("No changes staged for commit")

         tree_hash = self._build_tree_from_staged()
-
+
         # Get parent commit
         head_commit = self.get_head_commit()
         parents = [head_commit.store(self.object_store)] if head_commit else []
-
+
+        # Cryptographic verification: Merkle root + optional signing (private key from env)
+        meta = dict(metadata or {})
+        try:
+            from .crypto_verify import (
+                _collect_blob_hashes_from_tree,
+                build_merkle_tree,
+                load_private_key_from_env,
+                sign_merkle_root,
+                ED25519_AVAILABLE,
+            )
+            from .objects import Tree
+
+            tree = Tree.load(self.object_store, tree_hash)
+            if tree:
+                blobs = _collect_blob_hashes_from_tree(self.object_store, tree_hash)
+                merkle_root = build_merkle_tree(blobs)
+                meta["merkle_root"] = merkle_root
+                if ED25519_AVAILABLE:
+                    private_pem = load_private_key_from_env()
+                    if private_pem:
+                        meta["signature"] = sign_merkle_root(merkle_root, private_pem)
+        except Exception:
+            pass
+
         # Create commit
         commit = Commit(
             tree=tree_hash,
             parents=parents,
             author=self.get_author(),
-            timestamp=datetime.utcnow().isoformat() +
+            timestamp=datetime.utcnow().isoformat() + "Z",
             message=message,
-            metadata=
+            metadata=meta,
         )
         commit_hash = commit.store(self.object_store)
-
+
         # Reflog: record HEAD change
-        old_hash = parents[0] if parents else
-        self.refs.append_reflog(
-
+        old_hash = parents[0] if parents else "0" * 64
+        self.refs.append_reflog("HEAD", old_hash, commit_hash, f"commit: {message}")
+
+        # Audit
+        try:
+            from .audit import append_audit
+
+            append_audit(self.mem_dir, "commit", {"commit": commit_hash, "message": message})
+        except Exception:
+            pass
+
         # Update HEAD
         head = self.refs.get_head()
-        if head[
-            self.refs.set_branch_commit(head[
+        if head["type"] == "branch":
+            self.refs.set_branch_commit(head["value"], commit_hash)
         else:
             self.refs.set_head_detached(commit_hash)
-
+
         # Clear staging area
         self.staging.clear()
-
+
         return commit_hash
-
+
     def checkout(self, ref: str, force: bool = False) -> str:
         """
         Checkout a commit or branch.
-
+
         Args:
             ref: Branch name, tag name, or commit hash
             force: Whether to discard uncommitted changes
-
+
         Returns:
             Commit hash that was checked out
         """
         # Get current HEAD for reflog
         old_head = self.refs.get_head()
         old_hash = None
-        if old_head[
-            old_hash = self.refs.get_branch_commit(old_head[
+        if old_head["type"] == "branch":
+            old_hash = self.refs.get_branch_commit(old_head["value"])
         else:
-            old_hash = old_head.get(
-
+            old_hash = old_head.get("value")
+
         # Resolve reference
         commit_hash = self.resolve_ref(ref)
         if not commit_hash:
@@ -345,172 +416,191 @@ class Repository:
         if not tree:
             raise ValueError(f"Reference not found: {ref}")

+        # Cryptographic verification: reject if Merkle/signature invalid
+        try:
+            from .crypto_verify import verify_commit_optional
+
+            verify_commit_optional(
+                self.object_store, commit_hash, mem_dir=self.mem_dir, strict=False
+            )
+        except ValueError as e:
+            raise ValueError(str(e))
+
         # Check for uncommitted changes
         if not force:
             staged = self.staging.get_staged_files()
             if staged:
                 raise ValueError(
-                    "You have uncommitted changes. "
-                    "Commit them or use --force to discard."
+                    "You have uncommitted changes. " "Commit them or use --force to discard."
                 )

         self._restore_tree_to_current_dir(tree)
-
+
         # Reflog: record HEAD change
         if old_hash and old_hash != commit_hash:
-            self.refs.append_reflog(
-
+            self.refs.append_reflog("HEAD", old_hash, commit_hash, f"checkout: moving to {ref}")
+
         # Update HEAD
         if self.refs.branch_exists(ref):
             self.refs.set_head_branch(ref)
         else:
             self.refs.set_head_detached(commit_hash)
-
+
         # Clear staging
         self.staging.clear()
-
+
+        # Audit
+        try:
+            from .audit import append_audit
+
+            append_audit(self.mem_dir, "checkout", {"ref": ref, "commit": commit_hash})
+        except Exception:
+            pass
+
         return commit_hash
-
+
     def get_status(self) -> Dict[str, Any]:
         """
         Get repository status.
-
+
         Returns:
             Status dictionary with staged, modified, untracked files
         """
         staged = self.staging.get_staged_files()
-
+
         # Compare current directory with HEAD
         head_commit = self.get_head_commit()
         head_files = {}
-
+
         if head_commit:
             tree = Tree.load(self.object_store, head_commit.tree)
             if tree:
                 for entry in tree.entries:
-                    path = entry.path +
+                    path = entry.path + "/" + entry.name if entry.path else entry.name
                     head_files[path] = entry.hash
-
+
         # Check working directory
         modified = []
         untracked = []
-
+
         for root, dirs, files in os.walk(self.current_dir):
-            dirs[:] = [d for d in dirs if not d.startswith(
-
+            dirs[:] = [d for d in dirs if not d.startswith(".")]
+
             for filename in files:
                 full_path = Path(root) / filename
                 rel_path = str(full_path.relative_to(self.current_dir))
-
+
                 if rel_path not in staged:
                     content = full_path.read_bytes()
                     blob = Blob(content=content)
                     blob_hash = blob.store(self.object_store)
-
+
                     if rel_path in head_files:
                         if head_files[rel_path] != blob_hash:
                             modified.append(rel_path)
                     else:
                         untracked.append(rel_path)
-
+
         # Check for deleted files
         deleted = []
         for path in head_files:
             full_path = self.current_dir / path
             if not full_path.exists() and path not in staged:
                 deleted.append(path)
-
+
         return {
-
-
-
-
-
-
+            "staged": list(staged.keys()),
+            "modified": modified,
+            "untracked": untracked,
+            "deleted": deleted,
+            "head": self.refs.get_head(),
+            "branch": self.refs.get_current_branch(),
         }
-
+
     def get_log(self, max_count: int = 10) -> List[Dict[str, Any]]:
         """
         Get commit history.
-
+
         Args:
             max_count: Maximum number of commits to return
-
+
         Returns:
             List of commit info dictionaries
         """
         commits = []
         commit_hash = None
-
+
         # Get starting commit
         head = self.refs.get_head()
-        if head[
-            commit_hash = self.refs.get_branch_commit(head[
+        if head["type"] == "branch":
+            commit_hash = self.refs.get_branch_commit(head["value"])
         else:
-            commit_hash = head[
-
+            commit_hash = head["value"]
+
         # Walk back through parents
         while commit_hash and len(commits) < max_count:
             commit = Commit.load(self.object_store, commit_hash)
             if not commit:
                 break
-
-            commits.append(
-
-
-
-
-
-
-
-
+
+            commits.append(
+                {
+                    "hash": commit_hash,
+                    "short_hash": commit_hash[:8],
+                    "message": commit.message,
+                    "author": commit.author,
+                    "timestamp": commit.timestamp,
+                    "parents": commit.parents,
+                }
+            )
+
             # Follow first parent (linear history for now)
             commit_hash = commit.parents[0] if commit.parents else None
-
+
         return commits
-
-    def stash_create(self, message: str =
+
+    def stash_create(self, message: str = "") -> Optional[str]:
         """
         Stash current changes (staged + modified + untracked) and reset to HEAD.
         Returns stash commit hash or None if nothing to stash.
         """
         status = self.get_status()
-        if not status[
+        if not status["staged"] and not status["modified"] and not status["untracked"]:
             return None
-
+
         # Stage everything
         self.stage_directory()
         staged = self.staging.get_staged_files()
         if not staged:
             return None
-
+
         # Create stash commit (parent = HEAD)
         head_commit = self.get_head_commit()
         parents = [head_commit.store(self.object_store)] if head_commit else []
-
+
         tree_hash = self._build_tree_from_staged()

         stash_commit = Commit(
             tree=tree_hash,
             parents=parents,
             author=self.get_author(),
-            timestamp=datetime.utcnow().isoformat() +
-            message=message or
-            metadata={
+            timestamp=datetime.utcnow().isoformat() + "Z",
+            message=message or "WIP on " + (self.refs.get_current_branch() or "HEAD"),
+            metadata={"stash": True},
         )
         stash_hash = stash_commit.store(self.object_store)
-
+
         self.refs.stash_push(stash_hash, message)
         self.staging.clear()
-
-        head_hash = self.resolve_ref(
+
+        head_hash = self.resolve_ref("HEAD")
         if head_hash:
             tree = self.get_commit_tree(head_hash)
             if tree:
                 self._restore_tree_to_current_dir(tree)
-
+
         return stash_hash
-
+
     def stash_pop(self, index: int = 0) -> Optional[str]:
         """Apply stash at index and remove from stash list."""
         stash_hash = self.refs.stash_pop(index)