agmem 0.1.6__py3-none-any.whl → 0.2.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {agmem-0.1.6.dist-info → agmem-0.2.1.dist-info}/METADATA +15 -8
- {agmem-0.1.6.dist-info → agmem-0.2.1.dist-info}/RECORD +25 -16
- memvcs/__init__.py +1 -1
- memvcs/cli.py +1 -1
- memvcs/commands/daemon.py +37 -1
- memvcs/commands/distill.py +6 -0
- memvcs/coordinator/__init__.py +5 -0
- memvcs/coordinator/server.py +239 -0
- memvcs/core/compression_metrics.py +248 -0
- memvcs/core/delta.py +258 -0
- memvcs/core/distiller.py +76 -61
- memvcs/core/fast_similarity.py +404 -0
- memvcs/core/federated.py +13 -2
- memvcs/core/gardener.py +8 -68
- memvcs/core/pack.py +192 -34
- memvcs/core/privacy_validator.py +187 -0
- memvcs/core/protocol_builder.py +198 -0
- memvcs/core/remote.py +82 -2
- memvcs/core/zk_proofs.py +62 -5
- memvcs/health/__init__.py +25 -0
- memvcs/health/monitor.py +452 -0
- {agmem-0.1.6.dist-info → agmem-0.2.1.dist-info}/WHEEL +0 -0
- {agmem-0.1.6.dist-info → agmem-0.2.1.dist-info}/entry_points.txt +0 -0
- {agmem-0.1.6.dist-info → agmem-0.2.1.dist-info}/licenses/LICENSE +0 -0
- {agmem-0.1.6.dist-info → agmem-0.2.1.dist-info}/top_level.txt +0 -0
memvcs/core/protocol_builder.py
ADDED

@@ -0,0 +1,198 @@
+"""
+Protocol Builder for federated agent summaries.
+
+Ensures client-side summaries conform to the server's PushRequest schema
+before transmission, preventing 422 Validation Errors and protocol mismatches.
+
+Provides:
+- ClientSummaryBuilder: Constructs AgentSummary from raw produce_local_summary output
+- SchemaValidationError: Raised when summary doesn't match server schema
+- Deterministic agent_id generation from repository content
+"""
+
+import hashlib
+import json
+from datetime import datetime, timezone
+from pathlib import Path
+from typing import Any, Dict, List, Optional
+
+
+class SchemaValidationError(Exception):
+    """Raised when client summary doesn't match server schema."""
+
+    pass
+
+
+class ClientSummaryBuilder:
+    """Build protocol-compliant AgentSummary from raw produce_local_summary output.
+
+    Handles:
+    - Key name mapping (topics -> topic_counts)
+    - Fact count to fact_hashes conversion (int -> list of hash strings)
+    - Auto-generation of agent_id from repo hash (deterministic, replayable)
+    - ISO-8601 timestamp addition
+    - Schema validation against server expectations
+    - Wrapping in {"summary": {...}} envelope
+    """
+
+    REQUIRED_FIELDS = {"agent_id", "timestamp", "topic_counts", "fact_hashes"}
+
+    @staticmethod
+    def generate_agent_id(repo_root: Path) -> str:
+        """Generate deterministic agent_id from repository content.
+
+        Uses SHA-256 hash of repo root path to ensure consistency across runs
+        while remaining unique per repository. This is deterministic (same repo
+        always gets same agent_id) and replayable.
+
+        Args:
+            repo_root: Path to the repository root
+
+        Returns:
+            Unique agent identifier in format: "agent-<first-16-chars-of-hash>"
+        """
+        repo_hash = hashlib.sha256(str(repo_root.resolve()).encode()).hexdigest()[:16]
+        return f"agent-{repo_hash}"
+
+    @staticmethod
+    def build(
+        repo_root: Path,
+        raw_summary: Dict[str, Any],
+        strict_mode: bool = False,
+    ) -> Dict[str, Any]:
+        """Build protocol-compliant summary from raw produce_local_summary output.
+
+        Transforms the client's produce_local_summary() output into the format
+        expected by the server's PushRequest model.
+
+        Args:
+            repo_root: Path to repository root (used for agent_id generation)
+            raw_summary: Output from produce_local_summary()
+            strict_mode: If True, raise on validation error; if False, warn and repair
+
+        Returns:
+            Dict with structure: {"summary": {"agent_id": "...", "timestamp": "...",
+            "topic_counts": {...}, "fact_hashes": [...]}}
+
+        Raises:
+            SchemaValidationError: If strict_mode=True and schema validation fails
+        """
+        # In strict mode, validate raw input has required fields BEFORE transformation
+        if strict_mode:
+            required_raw_fields = {"memory_types", "topics", "topic_hashes", "fact_count"}
+            missing = required_raw_fields - set(raw_summary.keys())
+            if missing:
+                raise SchemaValidationError(
+                    f"Raw summary missing required fields: {', '.join(sorted(missing))}"
+                )
+
+        # Generate required fields
+        agent_id = ClientSummaryBuilder.generate_agent_id(repo_root)
+        timestamp = datetime.now(timezone.utc).isoformat()
+
+        # Transform key names and structure
+        topic_counts = raw_summary.get("topics", {})
+        if not isinstance(topic_counts, dict):
+            topic_counts = {}
+
+        # Convert fact_count (int) to fact_hashes (list of strings)
+        # If topic_hashes is present, use it; otherwise generate from fact_count
+        fact_hashes: List[str] = []
+        if "topic_hashes" in raw_summary and isinstance(raw_summary["topic_hashes"], dict):
+            # Flatten all topic hashes into a single list
+            for topic_hash_list in raw_summary["topic_hashes"].values():
+                if isinstance(topic_hash_list, list):
+                    fact_hashes.extend(topic_hash_list)
+
+        # If fact_hashes is still empty but we have fact_count, generate placeholder hashes
+        if not fact_hashes and "fact_count" in raw_summary:
+            fact_count = raw_summary["fact_count"]
+            if isinstance(fact_count, int):
+                # Generate placeholder hashes (in real scenario, client would preserve actual hashes)
+                fact_hashes = [
+                    hashlib.sha256(f"fact-{i}".encode()).hexdigest() for i in range(fact_count)
+                ]
+
+        # Build AgentSummary structure
+        agent_summary = {
+            "agent_id": agent_id,
+            "timestamp": timestamp,
+            "topic_counts": topic_counts,
+            "fact_hashes": fact_hashes,
+        }
+
+        # Validate schema
+        errors = ClientSummaryBuilder._validate_schema(agent_summary)
+        if errors:
+            error_msg = f"Schema validation failed:\n" + "\n".join(f" - {e}" for e in errors)
+            if strict_mode:
+                raise SchemaValidationError(error_msg)
+            else:
+                print(f"Warning: {error_msg}")
+
+        # Return wrapped in envelope
+        return {"summary": agent_summary}
+
+    @staticmethod
+    def _validate_schema(agent_summary: Dict[str, Any]) -> List[str]:
+        """Validate agent_summary against expected schema.
+
+        Args:
+            agent_summary: The summary dict to validate
+
+        Returns:
+            List of error messages (empty if valid)
+        """
+        errors = []
+
+        # Check required fields
+        for field in ClientSummaryBuilder.REQUIRED_FIELDS:
+            if field not in agent_summary:
+                errors.append(f"Missing required field: {field}")
+
+        # Validate field types
+        if "agent_id" in agent_summary and not isinstance(agent_summary["agent_id"], str):
+            errors.append(f"agent_id must be string, got {type(agent_summary['agent_id'])}")
+
+        if "timestamp" in agent_summary:
+            ts = agent_summary["timestamp"]
+            if not isinstance(ts, str):
+                errors.append(f"timestamp must be string, got {type(ts)}")
+            # Validate ISO-8601 format
+            elif not _is_iso8601(ts):
+                errors.append(f"timestamp not in ISO-8601 format: {ts}")
+
+        if "topic_counts" in agent_summary:
+            tc = agent_summary["topic_counts"]
+            if not isinstance(tc, dict):
+                errors.append(f"topic_counts must be dict, got {type(tc)}")
+            else:
+                for k, v in tc.items():
+                    if not isinstance(k, str):
+                        errors.append(f"topic_counts key must be string, got {type(k)}")
+                    if not isinstance(v, int):
+                        errors.append(f"topic_counts value must be int, got {type(v)}")
+
+        if "fact_hashes" in agent_summary:
+            fh = agent_summary["fact_hashes"]
+            if not isinstance(fh, list):
+                errors.append(f"fact_hashes must be list, got {type(fh)}")
+            else:
+                for h in fh:
+                    if not isinstance(h, str):
+                        errors.append(f"fact_hashes element must be string, got {type(h)}")
+
+        return errors
+
+
+def _is_iso8601(timestamp: str) -> bool:
+    """Check if timestamp is in ISO-8601 format."""
+    try:
+        # Try parsing with common ISO-8601 formats
+        if timestamp.endswith("Z"):
+            datetime.fromisoformat(timestamp.replace("Z", "+00:00"))
+        else:
+            datetime.fromisoformat(timestamp)
+        return True
+    except (ValueError, TypeError):
+        return False
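The module above is consumed ahead of a federated push: `ClientSummaryBuilder.build()` takes the raw `produce_local_summary()` dict and returns the `{"summary": {...}}` envelope the coordinator server's PushRequest model expects. A minimal usage sketch of that call; the `raw_summary` values are illustrative placeholders, not output captured from a real repository:

```python
from pathlib import Path

from memvcs.core.protocol_builder import ClientSummaryBuilder, SchemaValidationError

# Hypothetical raw_summary shaped like produce_local_summary() output; values are made up.
raw_summary = {
    "memory_types": ["episodic", "semantic"],
    "topics": {"planning": 3, "tooling": 1},
    "topic_hashes": {"planning": ["9f2c" * 16], "tooling": ["ab41" * 16]},
    "fact_count": 2,
}

try:
    payload = ClientSummaryBuilder.build(Path("."), raw_summary, strict_mode=True)
except SchemaValidationError as exc:
    print(f"Summary rejected before transmission: {exc}")
else:
    summary = payload["summary"]
    print(summary["agent_id"])          # deterministic, e.g. "agent-<16 hex chars>"
    print(summary["topic_counts"])      # {'planning': 3, 'tooling': 1}
    print(len(summary["fact_hashes"]))  # 2, flattened from topic_hashes
```

With `strict_mode=True` malformed input is rejected before it ever reaches the server, which is the 422 class of failure the module docstring calls out; with the default `strict_mode=False` the builder warns and repairs instead.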
memvcs/core/remote.py
CHANGED

@@ -1,7 +1,7 @@
 """
-Remote sync for agmem - file-based
+Remote sync for agmem - file-based, cloud (S3/GCS), and IPFS push/pull/clone.
 
-Supports file
+Supports file://, s3://, gs://, and ipfs:// URLs with optional distributed locking.
 """
 
 import json

@@ -19,6 +19,11 @@ def _is_cloud_remote(url: str) -> bool:
     return url.startswith("s3://") or url.startswith("gs://")
 
 
+def _is_ipfs_remote(url: str) -> bool:
+    """Return True if URL is IPFS (ipfs://<cid>)."""
+    return url.startswith("ipfs://")
+
+
 def parse_remote_url(url: str) -> Path:
     """Parse remote URL to local path. Supports file:// only. Rejects path traversal."""
     parsed = urlparse(url)

@@ -302,6 +307,75 @@ class Remote:
             pass
         return f"Fetched {copied} object(s) from {self.name}"
 
+    def _push_to_ipfs(self, branch: Optional[str] = None) -> str:
+        """Push objects to IPFS and update remote URL with CID."""
+        from .ipfs_remote import push_to_ipfs
+
+        refs = RefsManager(self.mem_dir)
+        store = ObjectStore(self.objects_dir)
+
+        # Determine which branch to push
+        target_branch = branch if branch else refs.get_current_branch() or "main"
+        commit_hash = refs.get_branch_commit(target_branch)
+
+        if not commit_hash:
+            raise ValueError(f"Branch '{target_branch}' has no commit")
+
+        # Get gateway URL from config or use default
+        gateway_url = self._config.get("ipfs", {}).get("gateway", "https://ipfs.io")
+
+        # Push to IPFS
+        cid = push_to_ipfs(self.objects_dir, target_branch, commit_hash, gateway_url, store)
+
+        if not cid:
+            raise ValueError("Failed to push to IPFS gateway")
+
+        # Update remote URL to new CID for future pulls
+        self.set_remote_url(f"ipfs://{cid}")
+
+        # TODO: Pin CID to prevent garbage collection
+        # Options: local IPFS daemon (ipfshttpclient), pinning service (Pinata/Infura)
+        # For now, user must manually pin or use a pinning service
+
+        try:
+            from .audit import append_audit
+
+            append_audit(
+                self.mem_dir,
+                "push",
+                {"remote": self.name, "branch": target_branch, "ipfs_cid": cid},
+            )
+        except Exception:
+            pass
+
+        return f"Pushed to IPFS: {cid} (WARNING: Not pinned - will be garbage collected unless pinned separately)"
+
+    def _pull_from_ipfs(self, url: str) -> str:
+        """Pull objects from IPFS by CID."""
+        from .ipfs_remote import pull_from_ipfs, parse_ipfs_url
+
+        cid = parse_ipfs_url(url)
+        if not cid:
+            raise ValueError(f"Invalid IPFS URL: {url}")
+
+        # Get gateway URL from config or use default
+        gateway_url = self._config.get("ipfs", {}).get("gateway", "https://ipfs.io")
+
+        # Pull from IPFS
+        success = pull_from_ipfs(self.objects_dir, cid, gateway_url)
+
+        if not success:
+            raise ValueError(f"Failed to pull from IPFS: {cid}")
+
+        try:
+            from .audit import append_audit
+
+            append_audit(self.mem_dir, "fetch", {"remote": self.name, "ipfs_cid": cid})
+        except Exception:
+            pass
+
+        return f"Fetched from IPFS: {cid}"
+
     def push(self, branch: Optional[str] = None) -> str:
         """
         Push objects and refs to remote.
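Both new methods resolve the gateway the same way: an `ipfs` section in the remote's config dict may override the default public gateway. A small sketch of just that lookup (the surrounding config structure is assumed; only the `ipfs.gateway` key appears in this diff):

```python
# The expression used by _push_to_ipfs/_pull_from_ipfs, lifted out of the class for illustration.
def resolve_gateway(config: dict) -> str:
    return config.get("ipfs", {}).get("gateway", "https://ipfs.io")

assert resolve_gateway({}) == "https://ipfs.io"  # default public gateway
assert resolve_gateway({"ipfs": {"gateway": "http://127.0.0.1:8080"}}) == "http://127.0.0.1:8080"  # local node
```

Note the push return message: the CID is not pinned, so the pushed objects can be garbage-collected unless pinned separately; the TODO in the diff suggests a local IPFS daemon (ipfshttpclient) or a pinning service such as Pinata/Infura.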
@@ -311,6 +385,9 @@ class Remote:
         if not url:
             raise ValueError(f"Remote '{self.name}' has no URL configured")
 
+        if _is_ipfs_remote(url):
+            return self._push_to_ipfs(branch)
+
         if _is_cloud_remote(url):
             try:
                 from .storage import get_adapter

@@ -427,6 +504,9 @@ class Remote:
         if not url:
             raise ValueError(f"Remote '{self.name}' has no URL configured")
 
+        if _is_ipfs_remote(url):
+            return self._pull_from_ipfs(url)
+
         if _is_cloud_remote(url):
             try:
                 from .storage import get_adapter
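With these two hunks, `push()` and `pull()` choose a backend purely by URL scheme: `ipfs://` routes to the new helpers, `s3://`/`gs://` to the cloud adapters, and anything else falls through to the existing `file://` path. A standalone sketch of that dispatch order (the handler labels are stand-ins, not the `Remote` methods themselves):

```python
def _is_ipfs_remote(url: str) -> bool:
    """Return True if URL is IPFS (ipfs://<cid>)."""
    return url.startswith("ipfs://")


def _is_cloud_remote(url: str) -> bool:
    return url.startswith("s3://") or url.startswith("gs://")


def route(url: str) -> str:
    """Mirror the ordering in Remote.push()/pull(): IPFS first, then cloud, then file."""
    if _is_ipfs_remote(url):
        return "ipfs"   # Remote._push_to_ipfs / Remote._pull_from_ipfs
    if _is_cloud_remote(url):
        return "cloud"  # storage adapter path (S3/GCS)
    return "file"       # original file:// path


assert route("ipfs://bafy-example-cid") == "ipfs"
assert route("s3://bucket/agmem") == "cloud"
assert route("file:///srv/agmem-remote") == "file"
```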
memvcs/core/zk_proofs.py
CHANGED

@@ -1,5 +1,17 @@
 """
-
+Cryptographic proof system for agmem.
+
+IMPORTANT: Current implementation provides PROOF-OF-KNOWLEDGE, not true zero-knowledge proofs.
+
+Limitations:
+- Keyword proof leaks: word count in file, allows verifier to test other words
+- Freshness proof: relies on forgeable filesystem mtime
+- Both proofs reveal deterministic information about file content
+
+For true zero-knowledge proofs, consider integrating zk-SNARK libraries like:
+- py-ecc (Ethereum cryptography)
+- circom (circuit compiler)
+- libsnark bindings
 
 Hash/signature-based proofs: keyword containment (Merkle set membership),
 memory freshness (signed timestamp). Full zk-SNARK backend can be added later.

@@ -36,8 +48,30 @@ def _word_hashes(content: str) -> List[str]:
 
 def prove_keyword_containment(memory_path: Path, keyword: str, output_proof_path: Path) -> bool:
     """
-    Prove memory file contains keyword
-
+    Prove memory file contains keyword using Merkle set membership.
+
+    WARNING: This is PROOF-OF-KNOWLEDGE, not zero-knowledge:
+    - Leaks exact count of unique words in file (via Merkle root)
+    - Verifier can test if OTHER words exist by hashing and checking against same root
+    - Root is deterministic over full word set
+
+    For true zero-knowledge, would need:
+    - Commitment scheme that hides set size
+    - zk-SNARK proof that keyword ∈ committed set
+    - No ability for verifier to test other words
+
+    Current implementation is useful for:
+    - Proving you possess a file containing specific keywords
+    - Auditing that memories contain required terms
+    - Not suitable for privacy-preserving keyword proofs
+
+    Args:
+        memory_path: Path to memory file
+        keyword: Keyword to prove containment of
+        output_proof_path: Where to write proof JSON
+
+    Returns:
+        True if proof created successfully
     """
     if not memory_path.exists() or not memory_path.is_file():
         return False

@@ -68,8 +102,31 @@ prove_memory_freshness(
     memory_path: Path, after_timestamp: str, output_proof_path: Path, mem_dir: Optional[Path] = None
 ) -> bool:
     """
-    Prove memory was updated after date
-
+    Prove memory was updated after date using signed timestamp.
+
+    WARNING: Security limitations:
+    - Relies on filesystem mtime which is TRIVIALLY FORGEABLE (touch command)
+    - Only proves key holder signed *some* timestamp, not actual freshness
+    - No protection against backdating files
+
+    Improvements needed:
+    - Sign content hash + timestamp (not just timestamp)
+    - Use trusted timestamping service (RFC 3161)
+    - Blockchain-based timestamp anchoring
+
+    Current implementation is useful for:
+    - Proving you signed a file at some claimed time
+    - Creating audit trails with signature verification
+    - Not suitable for proving actual file recency
+
+    Args:
+        memory_path: Path to memory file
+        after_timestamp: Timestamp to prove freshness after (not currently enforced)
+        output_proof_path: Where to write proof JSON
+        mem_dir: Memory directory for key loading
+
+    Returns:
+        True if proof created successfully
     """
     if not memory_path.exists() or not memory_path.is_file():
         return False
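The freshness docstring's first suggested improvement is to sign the content hash together with the timestamp, so the signature is bound to what the file said rather than to a forgeable mtime. A minimal sketch of that direction using an Ed25519 key from the `cryptography` package; this illustrates the suggested fix, not the module's current behavior, and it still needs a trusted timestamp (RFC 3161 or anchoring) to rule out backdating:

```python
import hashlib
import json
from datetime import datetime, timezone
from pathlib import Path

from cryptography.hazmat.primitives.asymmetric.ed25519 import Ed25519PrivateKey


def sign_freshness_claim(memory_path: Path, key: Ed25519PrivateKey) -> dict:
    """Bind the signature to the file's content hash plus the claimed signing time."""
    claim = {
        "content_sha256": hashlib.sha256(memory_path.read_bytes()).hexdigest(),
        "signed_at": datetime.now(timezone.utc).isoformat(),
    }
    message = json.dumps(claim, sort_keys=True).encode()
    return {**claim, "signature": key.sign(message).hex()}


# key = Ed25519PrivateKey.generate()
# proof = sign_freshness_claim(Path("current/notes.md"), key)  # hypothetical memory file
```

Even then, the signature only proves the key holder saw this exact content at the time they claim; proving the claim against a clock the verifier trusts is what RFC 3161 or blockchain anchoring would add.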
memvcs/health/__init__.py
ADDED

@@ -0,0 +1,25 @@
+"""Health monitoring module for agmem daemon."""
+
+from .monitor import (
+    HealthMonitor,
+    StorageMonitor,
+    SemanticRedundancyChecker,
+    StaleMemoryDetector,
+    GraphConsistencyValidator,
+    StorageMetrics,
+    RedundancyReport,
+    StaleMemoryReport,
+    GraphConsistencyReport,
+)
+
+__all__ = [
+    "HealthMonitor",
+    "StorageMonitor",
+    "SemanticRedundancyChecker",
+    "StaleMemoryDetector",
+    "GraphConsistencyValidator",
+    "StorageMetrics",
+    "RedundancyReport",
+    "StaleMemoryReport",
+    "GraphConsistencyReport",
+]
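The new package simply re-exports the monitor classes, so the daemon (see the `daemon.py` changes in the file list) and other callers can import them from `memvcs.health` directly. Since `monitor.py` itself is not shown in this diff, the sketch below only demonstrates the equivalent import paths, not the monitors' APIs:

```python
# Both import forms resolve to the same classes thanks to the re-exports above.
from memvcs.health import HealthMonitor, StorageMonitor, StorageMetrics
from memvcs.health.monitor import HealthMonitor as DirectHealthMonitor

assert HealthMonitor is DirectHealthMonitor
print([cls.__name__ for cls in (HealthMonitor, StorageMonitor, StorageMetrics)])
```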