agmem 0.1.4-py3-none-any.whl → 0.1.5-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: agmem
-Version: 0.1.4
+Version: 0.1.5
 Summary: Agentic Memory Version Control System - Git for AI agent memories
 Home-page: https://github.com/vivek-tiwari-vt/agmem
 Author: agmem Team
@@ -1,4 +1,4 @@
-agmem-0.1.4.dist-info/licenses/LICENSE,sha256=X_S6RBErW-F0IDbM3FAEoDB-zxExFnl2m8640rTXphM,1067
+agmem-0.1.5.dist-info/licenses/LICENSE,sha256=X_S6RBErW-F0IDbM3FAEoDB-zxExFnl2m8640rTXphM,1067
 memvcs/__init__.py,sha256=mXwHTSlUPWo4ERqJLGJnxmxtGQQHPSbXb4IpO61l04M,193
 memvcs/cli.py,sha256=YF06oMNjKWUmiNahILmfjrIXgoXzU-5BJFmbunSb8Sc,6075
 memvcs/commands/__init__.py,sha256=A2D6xWaO6epU7iV4QSvqvF5TspnwRyDN7NojmGatPrE,510
@@ -11,21 +11,21 @@ memvcs/commands/checkout.py,sha256=xaYZSbCQ-MyLWPtwA2FdH6WqGMI3oF3R2JmCufGBVFg,3
 memvcs/commands/clean.py,sha256=e0OhSQdHfFnOPTRbyKbM8IcX4yJD5n_kaBKjIeoaRBo,1973
 memvcs/commands/clone.py,sha256=aB0LcugIWJE9IEez6y70KlpZu4eIF2EdXZxE24jXyac,3260
 memvcs/commands/commit.py,sha256=W4ulVZuEETJh1SHpscaQfNjyQMqeIE0AYZIbMbTrsq4,6801
-memvcs/commands/daemon.py,sha256=KM9XSCdm4-aVBi4flKjiefpG8SfSrYl10phoMpz0gyk,10707
+memvcs/commands/daemon.py,sha256=fV6aIz8bFP9VwB_MLudAb_lhhhBxSe2aV-Wjqe-nvPw,10708
 memvcs/commands/decay.py,sha256=QcgOTMJZxrfw_AOz94YHA3LGoNXRMDn69TxWlUrpSw4,2421
 memvcs/commands/diff.py,sha256=KcgD57_fae4uvQ8G9ZbXmLpAYYIDiWiBuVcjsDtyE1U,5480
 memvcs/commands/distill.py,sha256=reOldqg0lMgqIlpYEIKYfN_TxNwsjU9RnI8Uah1VqTQ,3088
 memvcs/commands/federated.py,sha256=Zj4kxHnjdIs1xu4v7B8XosQXNYK8Alv4I0kJQpmJe6Y,1840
 memvcs/commands/fsck.py,sha256=AdJBMLA2myQ0cJJcjUgsYptsE3qvX4JQc9UAwVmSHlA,7772
 memvcs/commands/garden.py,sha256=8JiLe3JRkOhY-N-h-IDuvdJiECiSElnUzXVtxtU2QgY,4050
-memvcs/commands/gc.py,sha256=vLGREkcHjR_rDvTvEh-dwNkAeTB9y4fU-BwBGbXOEg4,1940
+memvcs/commands/gc.py,sha256=SIx_AG1msCFW2E1VPbJgiPBLTHCucpvBlbmxnIKCBhY,1978
 memvcs/commands/graph.py,sha256=MDi6bK2w0OrpK5VOE8XXw5gQX7BuD7VzUyqJ5Ra9Bsg,4746
 memvcs/commands/init.py,sha256=TsrLFLXwkDFT0opsYJTfwu0NIxLrNiiba5SpzRtxjDI,1614
 memvcs/commands/log.py,sha256=eNlLs0-PS2nF0pMAMI8izKGUiEb2m3S0RB4Zh6cUQpE,2859
 memvcs/commands/mcp.py,sha256=PMfwVD6uHltN58Jh7IOiS1w7oND42tg14QKRCJNudmY,1740
 memvcs/commands/merge.py,sha256=s3QLZp-_I6OvhllLhL9yFZAQ8d4M4FbvxkXV7gUgw5M,4877
 memvcs/commands/pack.py,sha256=rIDjMpxJG0oxrWnB3vCGHqviCITIeIbdy3nhuHVHzM8,3629
-memvcs/commands/prove.py,sha256=qQYYV5GdLd0Av4pwaxNvUCcl5pmiBwCrlXJwRtXVCF4,2141
+memvcs/commands/prove.py,sha256=xSqNYGYtLOCdBywcDIduqVq-XoYDL9073mMTlkZmvuE,2171
 memvcs/commands/pull.py,sha256=hn9FIlNc3KUr5EUDo4_66KQSK0BSSXjOn32xaDNxf0Q,3621
 memvcs/commands/push.py,sha256=0abEdHkCMfHpH_Nmlw3OaU7Hzi0-RXF-cTVHpiSPw6k,5086
 memvcs/commands/recall.py,sha256=7nwC4mFYpdjKWG-Cs3cpDLr5_SgYJ6HkVSXDOkFke5A,4592
@@ -42,34 +42,34 @@ memvcs/commands/stash.py,sha256=CD3mRWehcmfVRPGGpndUBdTT_ku4LC_rmSKPvTEOTAo,3193
 memvcs/commands/status.py,sha256=O6BgzTiW3UHjXx6OKwH8X4g0hP0IlYDgr7As5RmeujU,3447
 memvcs/commands/tag.py,sha256=CaCnA3JifVrdr8DfX4g0bp-_oRvagJkQFcI4bJbW1uM,3004
 memvcs/commands/test.py,sha256=HZrpGZQhu9HnGZLjiq8TXi8jfOZqP-wc3bW6mgpP2yk,3926
-memvcs/commands/timeline.py,sha256=hH4kqd0cHbdtnjMrr_Sw6lt0kmu0yEVctHGOQ2iYK5s,4763
+memvcs/commands/timeline.py,sha256=JkuhsQ-6wPWbsjlbJb_qM4mEkxkxcWWzniXXQB4Qtec,4764
 memvcs/commands/tree.py,sha256=vdULq4vIXA_4gNfMnHn_Y78BwE0sJoeTBOnFJR3WsZ4,4927
 memvcs/commands/verify.py,sha256=04CVW5NYWkUlPJ5z1Kci6dfQFM6UmPTGZh9ZextFLMc,3887
-memvcs/commands/when.py,sha256=gbSQHk96zu4TiH1QIdQJUeSsy9WFbjaheh5jjTsGopw,4772
+memvcs/commands/when.py,sha256=bxG_tEYnZNBTl2IPkoxpc2LUEbO_5ev1hRvEzxQQDmc,4773
 memvcs/core/__init__.py,sha256=dkIC-4tS0GhwV2mZIbofEe8xR8uiFwrxslGf1aXwhYg,493
 memvcs/core/access_index.py,sha256=HhacnzSUASzRV2jhDHkwRFoPS3rtqh9n9yE1VV7JXpk,5596
 memvcs/core/audit.py,sha256=8APkm9Spl_-1rIdyRQz1elyxOeK3nlpwm0CLkpLlhTE,3732
-memvcs/core/compression_pipeline.py,sha256=ejFXBTHfBYbCD86a5V0-0wA39K-SBG7dt09oAy-XP5s,5481
+memvcs/core/compression_pipeline.py,sha256=Vzr5v_0pgAG20C8znC0-Ho5fEwBoaTOLddxMTldd64M,5564
 memvcs/core/config_loader.py,sha256=j-jgLDp2TRzWN9ZEZebfWSfatevBNYs0FEb3ud1SIR8,8277
 memvcs/core/consistency.py,sha256=YOG8xhqZLKZCLbai2rdcP0KxYPNGFv5RRMwrQ6qCeyc,7462
 memvcs/core/constants.py,sha256=WUjAb50BFcF0mbFi_GNteDLCxLihmViBm9Fb-JMPmbM,220
-memvcs/core/crypto_verify.py,sha256=-yphuOE4bP-V1_bpMfNnJTLtpAdtKq8OV2hNUlUxiwk,10432
+memvcs/core/crypto_verify.py,sha256=DTuC7Kfx6z2b8UWOWziBTqP633LrjXbdtGmBBqrJTF0,10424
 memvcs/core/decay.py,sha256=ROGwnqngs7eJNkbKmwyOdij607m73vpmoJqzrIDLBzk,6581
 memvcs/core/diff.py,sha256=koEHTLciIUxYKVJVuvmY0GDXMgDgGZP_qg5RayhF-iE,13226
-memvcs/core/distiller.py,sha256=859NUR3gzYQuvDFxMtGB2NcTGRmRj4VJyOZTlDKvSzI,11683
+memvcs/core/distiller.py,sha256=ZOmrwYYhOla8rZncQP_0y0Ab9jCl3GjtdoH82HkXlsw,12621
 memvcs/core/encryption.py,sha256=epny_nlW6ylllv1qxs1mAcFq-PrLIisgfot4llOoAqw,5289
-memvcs/core/federated.py,sha256=RRNzhDVahTM-XQanT__8IBfGsS6fPDbq40b4v327iHg,5374
-memvcs/core/gardener.py,sha256=YKw4amhlPrX34gvg71PNUWmERUhrqvhrCuHnOj229gs,17462
+memvcs/core/federated.py,sha256=vUYMZ0xv80hqGDRKq645Od1i8N33l-pIAkklJbJUlVg,5445
+memvcs/core/gardener.py,sha256=lBWkyE72O-JMiHM-oqrnex9k_xSv7FvztjkOdLdB0Kk,18610
 memvcs/core/hooks.py,sha256=XF9z8J5sWjAcuOyWQ2nuvEzK0UV8s4ThrcltaBZttzw,5448
-memvcs/core/ipfs_remote.py,sha256=1Xob0Tiz0-GevgQrBhwUBifnVLO8U0dUIlvJS88BMBk,6651
-memvcs/core/knowledge_graph.py,sha256=6UuSdkaaXQnVti9TK10ak_KeCn8apLOgB70GXN_1I-Q,16370
+memvcs/core/ipfs_remote.py,sha256=xmEO14bn_7Ej-W5jhx2QJyBd-ljj9S2COOxMmcZBiTs,6643
+memvcs/core/knowledge_graph.py,sha256=GY27e1rgraF2zMpz_jsumdUtpgTRk48yH5CAEQ3TDl4,16416
 memvcs/core/merge.py,sha256=x2eSaxr4f63Eq00FCJ6DDe2TZU8H5yHQpzKzMhYsaFw,19871
-memvcs/core/objects.py,sha256=G6EigwJI0c9NZ9LB36L-3beNYt_MwETNgbtwnrptqMA,11004
-memvcs/core/pack.py,sha256=SiEReq9EMzffd3trnc38REWrh5Vo5HAmErovNgsx01U,9749
+memvcs/core/objects.py,sha256=Xgw1IpQnJLCG5o_7gDHVQ-TNGR9CSpDYWRXzLgLSuec,11006
+memvcs/core/pack.py,sha256=nTzpPNNk47e7_oN3z7bjaichpzI7q-ql2E8eI2UuGyM,9828
 memvcs/core/pii_scanner.py,sha256=T6gQ1APFrSDk980fjnv4ZMF-UztbJgmUFSwGrwWixEw,10802
 memvcs/core/privacy_budget.py,sha256=fOPlxoKEAmsKtda-OJCrSaKjTyw7ekcqdN7KfRBw1CY,2113
 memvcs/core/refs.py,sha256=4Nx2ZVRa_DzfUZ4O1AwzOHEjoGAEICJKqSd9GxaiD_g,16754
-memvcs/core/remote.py,sha256=HmGXx-NZFw7wgf0rHcwmGOQSWUoHNP85RHP5UaUDuuE,19429
+memvcs/core/remote.py,sha256=1PINc6qYBIHRkNLMS8MLWM5DJIrX81uIfRrV6fXwwro,19495
 memvcs/core/repository.py,sha256=NzC2UFPv6ePxi5lfiSKyZFLclH4bJpWJz88pY7tDiv4,20605
 memvcs/core/schema.py,sha256=_CrEWCdArc0yDJ04GT7fyvjHqkal7gegdFSsFOjVpBc,15287
 memvcs/core/staging.py,sha256=dptdGi_74lhDkcGqGVU39ZyTkb25j-Rnkz0GWi83W1k,7221
@@ -77,7 +77,7 @@ memvcs/core/temporal_index.py,sha256=81hZHlVElp2UpXjseFVCdDUwxGM45zIU-y1dDlOhFHI
 memvcs/core/test_runner.py,sha256=7-0jCvji63JRbVfy3LNQWIQ7VL5weulOoG7SY1-YJbw,11496
 memvcs/core/trust.py,sha256=msx80Cl3bxyQTY8mFUKWY9P6l3zb1s8FafympgHwtpo,3494
 memvcs/core/vector_store.py,sha256=yUAp5BlaAtjkrtsdY1I-vmAp_YIFgJykBoNlp5hcg0I,11063
-memvcs/core/zk_proofs.py,sha256=dnwMqhGtzDQtaNuO1bhuLchqYyEDnXzbtd-jQH_M0qQ,5512
+memvcs/core/zk_proofs.py,sha256=j9AyHucYe9tOSrlxDeUMGgpRHMvNFOl8s4Q0AQHLKP0,5514
 memvcs/core/llm/__init__.py,sha256=vnjtE9Xlv9a2pZV88DMT9JaINkZ30hC9VLPL5lJRlps,236
 memvcs/core/llm/anthropic_provider.py,sha256=O1eaCb9r464ajLJz-Gy8lGxBie5ojRUZ_5HdgRXO5KY,1540
 memvcs/core/llm/base.py,sha256=qPzg3KPAMeoyWGc_2JoVR4-plpdft5Rc2g9uO-Z4fJQ,623
@@ -99,8 +99,8 @@ memvcs/retrieval/recaller.py,sha256=8KY-XjMUz5_vcKf46zI64uk1DEM__u7wM92ShukOtsY,
 memvcs/retrieval/strategies.py,sha256=26yxQQubQfjxWQXknfVMxuzPHf2EcZxJg_B99BEdl5c,11458
 memvcs/utils/__init__.py,sha256=8psUzz4Ntv2GzbRebkeVsoyC6Ck-FIwi0_lfYdj5oho,185
 memvcs/utils/helpers.py,sha256=37zg_DcQ2y99b9NSLqxFkglHe13rJXKhFDpEbQ7iLhM,4121
-agmem-0.1.4.dist-info/METADATA,sha256=IU1QZw4zdsUApbsgLUvoh8BNBtbOc3AeSUINr1GSx80,37487
-agmem-0.1.4.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
-agmem-0.1.4.dist-info/entry_points.txt,sha256=at7eWycgjqOo1wbUMECnXUsNo3gpCkJTU71OzrGLHu0,42
-agmem-0.1.4.dist-info/top_level.txt,sha256=HtMMsKuwLKLOdgF1GxqQztqFM54tTJctVdJuOec6B-4,7
-agmem-0.1.4.dist-info/RECORD,,
+agmem-0.1.5.dist-info/METADATA,sha256=q_9dsCFXbo9DGn6Hx4-7A9T3aHq9Sc5nS7ldEoazAdc,37487
+agmem-0.1.5.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
+agmem-0.1.5.dist-info/entry_points.txt,sha256=at7eWycgjqOo1wbUMECnXUsNo3gpCkJTU71OzrGLHu0,42
+agmem-0.1.5.dist-info/top_level.txt,sha256=HtMMsKuwLKLOdgF1GxqQztqFM54tTJctVdJuOec6B-4,7
+agmem-0.1.5.dist-info/RECORD,,
memvcs/commands/daemon.py CHANGED
@@ -208,6 +208,7 @@ class DaemonCommand:
         health_check_interval = 3600  # default 1 hour
         try:
             from ..core.config_loader import load_agmem_config
+
            config = load_agmem_config(repo.root)
            daemon_cfg = config.get("daemon") or {}
            health_check_interval = int(daemon_cfg.get("health_check_interval_seconds", 3600))
memvcs/commands/gc.py CHANGED
@@ -62,5 +62,7 @@ class GcCommand:
            dry_run=False,
        )
        if packed > 0:
-            print(f"Packed {packed} object(s) into pack file ({repack_freed} bytes from loose).")
+            print(
+                f"Packed {packed} object(s) into pack file ({repack_freed} bytes from loose)."
+            )
        return 0
memvcs/commands/prove.py CHANGED
@@ -60,7 +60,9 @@ class ProveCommand:
        ok = prove_memory_freshness(path, args.value, out_path, mem_dir=repo.mem_dir)

        if not ok:
-            print("Proof generation failed (keyword not in file, or signing key not set for freshness).")
+            print(
+                "Proof generation failed (keyword not in file, or signing key not set for freshness)."
+            )
            return 1
        print(f"Proof written to {out_path}")
        return 0
memvcs/commands/timeline.py CHANGED
@@ -54,6 +54,7 @@ class TimelineCommand:
        if from_ts and to_ts:
            try:
                from ..core.temporal_index import TemporalIndex
+
                ti = TemporalIndex(repo.mem_dir, repo.object_store)
                range_entries = ti.range_query(from_ts, to_ts)
                commits_in_range = {ch for _, ch in range_entries}
memvcs/commands/when.py CHANGED
@@ -66,6 +66,7 @@ class WhenCommand:
        if from_ts and to_ts:
            try:
                from ..core.temporal_index import TemporalIndex
+
                ti = TemporalIndex(repo.mem_dir, repo.object_store)
                range_entries = ti.range_query(from_ts, to_ts)
                commits_in_range = {ch for _, ch in range_entries}
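
Both the timeline and when hunks above touch the same range-query pattern: a TemporalIndex is built over the repository, and only commits whose timestamps fall between from_ts and to_ts are kept. The sketch below illustrates that filtering step; it assumes range_query returns (timestamp, commit_hash) pairs, as the set comprehension in the diff implies, and the helper name and sample hashes are illustrative, not part of the package.

```python
from typing import Iterable, List, Tuple


def commits_in_time_range(
    range_entries: Iterable[Tuple[str, str]], history: Iterable[str]
) -> List[str]:
    """Keep only commit hashes from `history` that the temporal index reported
    inside the requested window. `range_entries` is assumed to be an iterable
    of (timestamp, commit_hash) pairs, mirroring ti.range_query(from_ts, to_ts)."""
    in_range = {commit_hash for _, commit_hash in range_entries}
    return [ch for ch in history if ch in in_range]


# Hypothetical usage with made-up hashes:
entries = [("2024-06-01T00:00:00Z", "abc123"), ("2024-06-02T12:00:00Z", "def456")]
print(commits_in_time_range(entries, ["abc123", "fff999", "def456"]))  # ['abc123', 'def456']
```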
memvcs/core/compression_pipeline.py CHANGED
@@ -18,7 +18,9 @@ DEDUP_HASH_ALGO = "sha256"
 TIER_HOT_DAYS = 7


-def chunk_by_size(text: str, size: int = CHUNK_SIZE_DEFAULT, overlap: int = CHUNK_OVERLAP) -> List[str]:
+def chunk_by_size(
+    text: str, size: int = CHUNK_SIZE_DEFAULT, overlap: int = CHUNK_OVERLAP
+) -> List[str]:
     """Split text into chunks by character size with optional overlap."""
     if not text or size <= 0:
         return []
@@ -37,7 +39,7 @@ def chunk_by_sentences(text: str, max_chunk_chars: int = 512) -> List[str]:
     """Split text into chunks by sentence boundaries, up to max_chunk_chars per chunk."""
     if not text:
         return []
-    sentences = re.split(r'(?<=[.!?])\s+', text)
+    sentences = re.split(r"(?<=[.!?])\s+", text)
     chunks = []
     current = []
     current_len = 0
@@ -94,7 +96,10 @@ def dedup_by_similarity_threshold(
    embeddings = vector_store.embed(items)
    kept = [items[0]]
    for i in range(1, len(items)):
-        sims = [vector_store.similarity(embeddings[i], vector_store.embed([kept[j]])[0]) for j in range(len(kept))]
+        sims = [
+            vector_store.similarity(embeddings[i], vector_store.embed([kept[j]])[0])
+            for j in range(len(kept))
+        ]
        if not any(s >= threshold for s in sims):
            kept.append(items[i])
    return kept
@@ -144,12 +149,15 @@ class CompressionPipeline:
        if self.dedup_hash:
            chunk_tuples = dedup_by_hash(chunks)
        else:
-            chunk_tuples = [(c, hashlib.new(DEDUP_HASH_ALGO, c.encode()).hexdigest()) for c in chunks]
+            chunk_tuples = [
+                (c, hashlib.new(DEDUP_HASH_ALGO, c.encode()).hexdigest()) for c in chunks
+            ]
        tier = None
        if self.tier_by_recency and path and path.exists():
            try:
                mtime = path.stat().st_mtime
                from datetime import datetime, timezone
+
                age_days = (datetime.now(timezone.utc).timestamp() - mtime) / 86400
                tier = "hot" if age_days <= TIER_HOT_DAYS else "cold"
            except Exception:
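
The compression_pipeline changes above are formatting-only, but the signatures they touch describe the chunking behaviour: chunk_by_size splits on a fixed character budget with optional overlap, and chunk_by_sentences packs whole sentences up to a size cap. The stand-in below sketches a plain sliding-window version of the size-based variant under that assumption; it mirrors the signature shown in the diff but is not the package's implementation.

```python
from typing import List


def sliding_window_chunks(text: str, size: int = 512, overlap: int = 64) -> List[str]:
    """Illustrative sliding-window chunker: emit `size`-character windows that
    step forward by `size - overlap` characters. A stand-in for the documented
    chunk_by_size behaviour, not the library's code."""
    if not text or size <= 0:
        return []
    step = max(1, size - max(0, overlap))
    return [text[i : i + size] for i in range(0, len(text), step)]


print(sliding_window_chunks("abcdefghij", size=4, overlap=2))
# ['abcd', 'cdef', 'efgh', 'ghij', 'ij']
```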
memvcs/core/crypto_verify.py CHANGED
@@ -239,7 +239,7 @@ def verify_commit(
    stored_sig = (commit.metadata or {}).get("signature")
    if not stored_root:
        return (False, "commit has no merkle_root (unverified)")
-
+
    # Verify that blob objects can be loaded successfully (detects tampering in compressed/encrypted content)
    blob_hashes = _collect_blob_hashes_from_tree(store, commit.tree)
    for blob_hash in blob_hashes:
@@ -249,7 +249,7 @@ def verify_commit(
            return (False, f"blob {blob_hash[:8]} corrupted or missing")
        except Exception as e:
            return (False, f"merkle_root mismatch (commit tampered)")
-
+
    computed_root = build_merkle_root_for_commit(store, commit_hash)
    if not computed_root:
        return (False, "could not build Merkle tree (missing tree/blobs)")
memvcs/core/distiller.py CHANGED
@@ -164,9 +164,16 @@ class Distiller:
        out_path = self.target_dir / f"consolidated-{ts}.md"

        confidence_score = self.config.extraction_confidence_threshold
-        if self.config.use_dp and self.config.dp_epsilon is not None and self.config.dp_delta is not None:
+        if (
+            self.config.use_dp
+            and self.config.dp_epsilon is not None
+            and self.config.dp_delta is not None
+        ):
            from .privacy_budget import add_noise
-            confidence_score = add_noise(confidence_score, 0.1, self.config.dp_epsilon, self.config.dp_delta)
+
+            confidence_score = add_noise(
+                confidence_score, 0.1, self.config.dp_epsilon, self.config.dp_delta
+            )
        confidence_score = max(0.0, min(1.0, confidence_score))
        frontmatter = {
            "schema_version": "1.0",
@@ -277,12 +284,53 @@ class Distiller:
        clusters_processed = len(clusters)
        facts_extracted = facts_count
        episodes_archived = archived
-        if self.config.use_dp and self.config.dp_epsilon is not None and self.config.dp_delta is not None:
+        if (
+            self.config.use_dp
+            and self.config.dp_epsilon is not None
+            and self.config.dp_delta is not None
+        ):
            from .privacy_budget import add_noise
+
            sensitivity = 1.0
-            clusters_processed = max(0, int(round(add_noise(float(clusters_processed), sensitivity, self.config.dp_epsilon, self.config.dp_delta))))
-            facts_extracted = max(0, int(round(add_noise(float(facts_extracted), sensitivity, self.config.dp_epsilon, self.config.dp_delta))))
-            episodes_archived = max(0, int(round(add_noise(float(episodes_archived), sensitivity, self.config.dp_epsilon, self.config.dp_delta))))
+            clusters_processed = max(
+                0,
+                int(
+                    round(
+                        add_noise(
+                            float(clusters_processed),
+                            sensitivity,
+                            self.config.dp_epsilon,
+                            self.config.dp_delta,
+                        )
+                    )
+                ),
+            )
+            facts_extracted = max(
+                0,
+                int(
+                    round(
+                        add_noise(
+                            float(facts_extracted),
+                            sensitivity,
+                            self.config.dp_epsilon,
+                            self.config.dp_delta,
+                        )
+                    )
+                ),
+            )
+            episodes_archived = max(
+                0,
+                int(
+                    round(
+                        add_noise(
+                            float(episodes_archived),
+                            sensitivity,
+                            self.config.dp_epsilon,
+                            self.config.dp_delta,
+                        )
+                    )
+                ),
+            )

        return DistillerResult(
            success=True,
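
The distiller hunks above (and the gardener and federated hunks below) reformat the same differential-privacy pattern: add calibrated noise to a count, round it back to an integer, and clamp at zero. The sketch below illustrates that pattern with a Gaussian-mechanism-style stand-in for add_noise; agmem's actual memvcs/core/privacy_budget.py may calibrate noise differently, so treat the noise formula here as an assumption.

```python
import math
import random


def add_noise_stub(value: float, sensitivity: float, epsilon: float, delta: float) -> float:
    """Illustrative Gaussian-mechanism noise with the same signature as
    privacy_budget.add_noise; the real implementation may use a different
    mechanism or calibration."""
    sigma = sensitivity * math.sqrt(2.0 * math.log(1.25 / delta)) / epsilon
    return value + random.gauss(0.0, sigma)


def noisy_count(count: int, epsilon: float = 0.5, delta: float = 1e-5) -> int:
    # Same clamp-and-round pattern as the reformatted code above.
    return max(0, int(round(add_noise_stub(float(count), 1.0, epsilon, delta))))


print(noisy_count(12))  # e.g. 10 or 14 on a given run; never negative
```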
memvcs/core/federated.py CHANGED
@@ -48,6 +48,7 @@ def _extract_topic_from_md(path: Path, content: str) -> str:
    if end > 0:
        try:
            import yaml
+
            fm = yaml.safe_load(content[3:end])
            if isinstance(fm, dict):
                tags = fm.get("tags", [])
@@ -62,7 +63,11 @@ def _extract_topic_from_md(path: Path, content: str) -> str:


 def produce_local_summary(
-    repo_root: Path, memory_types: List[str], use_dp: bool = False, dp_epsilon: float = 0.1, dp_delta: float = 1e-5
+    repo_root: Path,
+    memory_types: List[str],
+    use_dp: bool = False,
+    dp_epsilon: float = 0.1,
+    dp_delta: float = 1e-5,
 ) -> Dict[str, Any]:
     """
     Produce a local summary from episodic/semantic data (no raw content).
@@ -101,10 +106,15 @@ def produce_local_summary(

    if use_dp and dp_epsilon and dp_delta:
        from .privacy_budget import add_noise
+
        for mtype in summary["topics"]:
            raw = summary["topics"][mtype]
-            summary["topics"][mtype] = max(0, int(round(add_noise(float(raw), 1.0, dp_epsilon, dp_delta))))
-        summary["fact_count"] = max(0, int(round(add_noise(float(summary["fact_count"]), 1.0, dp_epsilon, dp_delta))))
+            summary["topics"][mtype] = max(
+                0, int(round(add_noise(float(raw), 1.0, dp_epsilon, dp_delta)))
+            )
+        summary["fact_count"] = max(
+            0, int(round(add_noise(float(summary["fact_count"]), 1.0, dp_epsilon, dp_delta)))
+        )

    return summary

memvcs/core/gardener.py CHANGED
@@ -356,11 +356,26 @@ class Gardener:

        # Generate frontmatter (optionally noised for differential privacy)
        source_episodes = len(cluster.episodes)
-        if self.config.use_dp and self.config.dp_epsilon is not None and self.config.dp_delta is not None:
+        if (
+            self.config.use_dp
+            and self.config.dp_epsilon is not None
+            and self.config.dp_delta is not None
+        ):
            from .privacy_budget import add_noise
-            source_episodes = max(0, int(round(add_noise(
-                float(source_episodes), 1.0, self.config.dp_epsilon, self.config.dp_delta
-            ))))
+
+            source_episodes = max(
+                0,
+                int(
+                    round(
+                        add_noise(
+                            float(source_episodes),
+                            1.0,
+                            self.config.dp_epsilon,
+                            self.config.dp_delta,
+                        )
+                    )
+                ),
+            )
        frontmatter = {
            "schema_version": "1.0",
            "last_updated": datetime.utcnow().isoformat() + "Z",
@@ -499,12 +514,53 @@ class Gardener:
        clusters_found = len(clusters)
        insights_generated = insights_written
        episodes_archived = archived_count
-        if self.config.use_dp and self.config.dp_epsilon is not None and self.config.dp_delta is not None:
+        if (
+            self.config.use_dp
+            and self.config.dp_epsilon is not None
+            and self.config.dp_delta is not None
+        ):
            from .privacy_budget import add_noise
+
            sensitivity = 1.0
-            clusters_found = max(0, int(round(add_noise(float(clusters_found), sensitivity, self.config.dp_epsilon, self.config.dp_delta))))
-            insights_generated = max(0, int(round(add_noise(float(insights_generated), sensitivity, self.config.dp_epsilon, self.config.dp_delta))))
-            episodes_archived = max(0, int(round(add_noise(float(episodes_archived), sensitivity, self.config.dp_epsilon, self.config.dp_delta))))
+            clusters_found = max(
+                0,
+                int(
+                    round(
+                        add_noise(
+                            float(clusters_found),
+                            sensitivity,
+                            self.config.dp_epsilon,
+                            self.config.dp_delta,
+                        )
+                    )
+                ),
+            )
+            insights_generated = max(
+                0,
+                int(
+                    round(
+                        add_noise(
+                            float(insights_generated),
+                            sensitivity,
+                            self.config.dp_epsilon,
+                            self.config.dp_delta,
+                        )
+                    )
+                ),
+            )
+            episodes_archived = max(
+                0,
+                int(
+                    round(
+                        add_noise(
+                            float(episodes_archived),
+                            sensitivity,
+                            self.config.dp_epsilon,
+                            self.config.dp_delta,
+                        )
+                    )
+                ),
+            )

        return GardenerResult(
            success=True,
memvcs/core/ipfs_remote.py CHANGED
@@ -103,8 +103,7 @@ def _add_to_ipfs_gateway(bundle: bytes, gateway_url: str) -> Optional[str]:
    body = (
        b"--" + boundary.encode() + b"\r\n"
        b'Content-Disposition: form-data; name="file"; filename="agmem-bundle.bin"\r\n'
-        b"Content-Type: application/octet-stream\r\n\r\n"
-        + bundle + b"\r\n"
+        b"Content-Type: application/octet-stream\r\n\r\n" + bundle + b"\r\n"
        b"--" + boundary.encode() + b"--\r\n"
    )
    try:
memvcs/core/knowledge_graph.py CHANGED
@@ -326,7 +326,9 @@ class KnowledgeGraphBuilder:
                common = file_entities.get(path1, set()) & file_entities.get(path2, set())
                if common:
                    w = min(1.0, 0.3 + 0.1 * len(common))
-                    edge = GraphEdge(source=path1, target=path2, edge_type="co_occurrence", weight=w)
+                    edge = GraphEdge(
+                        source=path1, target=path2, edge_type="co_occurrence", weight=w
+                    )
                    edges.append(edge)
                    if self._graph is not None:
                        self._graph.add_edge(path1, path2, type="co_occurrence", weight=w)
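
The reformatted GraphEdge call keeps the existing co-occurrence weight formula, w = min(1.0, 0.3 + 0.1 * len(common)): a base of 0.3 plus 0.1 per shared entity, capped at 1.0. A quick check of that arithmetic:

```python
def co_occurrence_weight(shared_entities: int) -> float:
    # Same formula as in the KnowledgeGraphBuilder hunk above.
    return min(1.0, 0.3 + 0.1 * shared_entities)


print(co_occurrence_weight(3))   # ≈ 0.6
print(co_occurrence_weight(10))  # 1.0 (0.3 + 1.0 would exceed the cap)
```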
memvcs/core/objects.py CHANGED
@@ -110,6 +110,7 @@ class ObjectStore:
        # Try pack file when loose object missing
        try:
            from .pack import retrieve_from_pack
+
            result = retrieve_from_pack(self.objects_dir, hash_id, expected_type=obj_type)
            if result is not None:
                return result[1]
@@ -126,6 +127,7 @@ class ObjectStore:
            return True
        try:
            from .pack import retrieve_from_pack
+
            return retrieve_from_pack(self.objects_dir, hash_id, expected_type=obj_type) is not None
        except Exception:
            return False
memvcs/core/pack.py CHANGED
@@ -21,7 +21,12 @@ OBJ_TYPE_BLOB = 1
 OBJ_TYPE_TREE = 2
 OBJ_TYPE_COMMIT = 3
 OBJ_TYPE_TAG = 4
-TYPE_TO_BYTE = {"blob": OBJ_TYPE_BLOB, "tree": OBJ_TYPE_TREE, "commit": OBJ_TYPE_COMMIT, "tag": OBJ_TYPE_TAG}
+TYPE_TO_BYTE = {
+    "blob": OBJ_TYPE_BLOB,
+    "tree": OBJ_TYPE_TREE,
+    "commit": OBJ_TYPE_COMMIT,
+    "tag": OBJ_TYPE_TAG,
+}
 BYTE_TO_TYPE = {v: k for k, v in TYPE_TO_BYTE.items()}


@@ -152,7 +157,12 @@ def write_pack(
    if not index_entries:
        raise ValueError("No objects to pack")

-    pack_content = PACK_MAGIC + struct.pack(">I", PACK_VERSION) + struct.pack(">I", len(index_entries)) + bytes(pack_body)
+    pack_content = (
+        PACK_MAGIC
+        + struct.pack(">I", PACK_VERSION)
+        + struct.pack(">I", len(index_entries))
+        + bytes(pack_body)
+    )
    pack_hash = hashlib.sha256(pack_content).digest()
    pack_content += pack_hash

@@ -160,7 +170,9 @@ def write_pack(
    pack_path = pack_d / pack_name
    pack_path.write_bytes(pack_content)

-    index_content = bytearray(IDX_MAGIC + struct.pack(">I", IDX_VERSION) + struct.pack(">I", len(index_entries)))
+    index_content = bytearray(
+        IDX_MAGIC + struct.pack(">I", IDX_VERSION) + struct.pack(">I", len(index_entries))
+    )
    for hash_id, obj_type, off in index_entries:
        index_content.extend(bytes.fromhex(hash_id))
        index_content.append(TYPE_TO_BYTE[obj_type])
@@ -184,7 +196,9 @@ def _find_pack_index(objects_dir: Path) -> Optional[Path]:
    return None


-def retrieve_from_pack(objects_dir: Path, hash_id: str, expected_type: Optional[str] = None) -> Optional[Tuple[str, bytes]]:
+def retrieve_from_pack(
+    objects_dir: Path, hash_id: str, expected_type: Optional[str] = None
+) -> Optional[Tuple[str, bytes]]:
    """
    Retrieve object from pack by hash. Returns (obj_type, content) or None.
    If expected_type is set, only return if pack type matches.
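
The pack.py hunks are formatting-only, but the expressions they wrap spell out the pack layout: PACK_MAGIC, a big-endian uint32 version, a big-endian uint32 object count, the packed object bodies, and a raw SHA-256 digest of everything before it appended as a trailer. The sketch below shows how a reader might validate that header and trailer under those assumptions; PACK_MAGIC's value is not visible in the diff, so it is passed in as a parameter, and this is not the package's own reader.

```python
import hashlib
import struct
from typing import Tuple


def check_pack(pack_content: bytes, pack_magic: bytes) -> Tuple[int, int]:
    """Validate the trailer checksum and read (version, object_count) from a
    pack laid out as in write_pack above: magic + >I version + >I count + body,
    followed by a raw sha256 of everything before the trailer."""
    body, trailer = pack_content[:-32], pack_content[-32:]
    if hashlib.sha256(body).digest() != trailer:
        raise ValueError("pack checksum mismatch")
    if not body.startswith(pack_magic):
        raise ValueError("bad pack magic")
    off = len(pack_magic)
    version, count = struct.unpack(">II", body[off : off + 8])
    return version, count


# Hypothetical round-trip with an illustrative magic value and an empty body:
magic = b"PACK"  # illustrative only; the real PACK_MAGIC is not shown in the diff
fake = magic + struct.pack(">II", 1, 0)
fake += hashlib.sha256(fake).digest()
print(check_pack(fake, magic))  # (1, 0)
```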
memvcs/core/remote.py CHANGED
@@ -70,6 +70,7 @@ def _collect_objects_from_commit(store: ObjectStore, commit_hash: str) -> Set[st
 def _read_object_from_adapter(adapter: Any, hash_id: str) -> Optional[tuple]:
     """Read object from storage adapter. Returns (obj_type, content_bytes) or None."""
     import zlib
+
     for obj_type in ["commit", "tree", "blob", "tag"]:
         rel = f".mem/objects/{obj_type}/{hash_id[:2]}/{hash_id[2:]}"
         if not adapter.exists(rel):
@@ -78,7 +79,7 @@ def _read_object_from_adapter(adapter: Any, hash_id: str) -> Optional[tuple]:
            raw = adapter.read_file(rel)
            full = zlib.decompress(raw)
            null_idx = full.index(b"\0")
-            content = full[null_idx + 1:]
+            content = full[null_idx + 1 :]
            return (obj_type, content)
        except Exception:
            continue
@@ -240,7 +241,10 @@ class Remote:
                adapter.write_file(f".mem/refs/tags/{t}", (ch + "\n").encode())
        try:
            from .audit import append_audit
-            append_audit(self.mem_dir, "push", {"remote": self.name, "branch": branch, "copied": copied})
+
+            append_audit(
+                self.mem_dir, "push", {"remote": self.name, "branch": branch, "copied": copied}
+            )
        except Exception:
            pass
        return f"Pushed {copied} object(s) to {self.name}"
@@ -290,7 +294,10 @@ class Remote:
                break
        try:
            from .audit import append_audit
-            append_audit(self.mem_dir, "fetch", {"remote": self.name, "branch": branch, "copied": copied})
+
+            append_audit(
+                self.mem_dir, "fetch", {"remote": self.name, "branch": branch, "copied": copied}
+            )
        except Exception:
            pass
        return f"Fetched {copied} object(s) from {self.name}"
@@ -308,6 +315,7 @@ class Remote:
        try:
            from .storage import get_adapter
            from .storage.base import LockError
+
            adapter = get_adapter(url, self._config)
            lock_name = "agmem-push"
            adapter.acquire_lock(lock_name, 30)
@@ -423,6 +431,7 @@ class Remote:
        try:
            from .storage import get_adapter
            from .storage.base import LockError
+
            adapter = get_adapter(url, self._config)
            lock_name = "agmem-fetch"
            adapter.acquire_lock(lock_name, 30)
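
_read_object_from_adapter (reformatted above) documents the remote object layout: a loose object lives at .mem/objects/<type>/<hash[:2]>/<hash[2:]>, is zlib-compressed, and separates its header from the content with a NUL byte. Below is a minimal local-filesystem sketch of the same read path, assuming that layout; the storage adapter API itself is not reproduced here.

```python
import zlib
from pathlib import Path
from typing import Optional, Tuple


def read_loose_object(objects_dir: Path, hash_id: str) -> Optional[Tuple[str, bytes]]:
    """Try each object type directory, mirroring the layout used by
    _read_object_from_adapter: <objects_dir>/<type>/<hash[:2]>/<hash[2:]>,
    zlib-compressed, header and content separated by a NUL byte."""
    for obj_type in ["commit", "tree", "blob", "tag"]:
        path = objects_dir / obj_type / hash_id[:2] / hash_id[2:]
        if not path.exists():
            continue
        full = zlib.decompress(path.read_bytes())
        content = full[full.index(b"\0") + 1 :]
        return (obj_type, content)
    return None
```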
memvcs/core/zk_proofs.py CHANGED
@@ -79,6 +79,7 @@ def prove_memory_freshness(
        stat = memory_path.stat()
        ts = stat.st_mtime
        from datetime import datetime, timezone
+
        iso_ts = datetime.fromtimestamp(ts, tz=timezone.utc).isoformat()
    except Exception:
        return False
@@ -150,6 +151,7 @@ def verify_proof(proof_path: Path, statement_type: str, **kwargs: Any) -> bool:
        return False
    try:
        from datetime import datetime
+
        after_dt = datetime.fromisoformat(after_ts.replace("Z", "+00:00"))
        ts_dt = datetime.fromisoformat(ts_str.replace("Z", "+00:00"))
        return ts_dt >= after_dt