amfs 0.1.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- amfs-0.1.0/.gitignore +16 -0
- amfs-0.1.0/PKG-INFO +9 -0
- amfs-0.1.0/pyproject.toml +18 -0
- amfs-0.1.0/src/amfs/__init__.py +41 -0
- amfs-0.1.0/src/amfs/config.py +59 -0
- amfs-0.1.0/src/amfs/factory.py +69 -0
- amfs-0.1.0/src/amfs/memory.py +1151 -0
amfs-0.1.0/.gitignore
ADDED
amfs-0.1.0/PKG-INFO
ADDED
|
@@ -0,0 +1,9 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: amfs
|
|
3
|
+
Version: 0.1.0
|
|
4
|
+
Summary: AMFS Python SDK — Agent Memory File System
|
|
5
|
+
License-Expression: Apache-2.0
|
|
6
|
+
Requires-Python: >=3.11
|
|
7
|
+
Requires-Dist: amfs-adapter-filesystem
|
|
8
|
+
Requires-Dist: amfs-core
|
|
9
|
+
Requires-Dist: pyyaml<7,>=6.0
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
[project]
|
|
2
|
+
name = "amfs"
|
|
3
|
+
version = "0.1.0"
|
|
4
|
+
description = "AMFS Python SDK — Agent Memory File System"
|
|
5
|
+
requires-python = ">=3.11"
|
|
6
|
+
license = "Apache-2.0"
|
|
7
|
+
dependencies = [
|
|
8
|
+
"amfs-core",
|
|
9
|
+
"amfs-adapter-filesystem",
|
|
10
|
+
"pyyaml>=6.0,<7",
|
|
11
|
+
]
|
|
12
|
+
|
|
13
|
+
[build-system]
|
|
14
|
+
requires = ["hatchling"]
|
|
15
|
+
build-backend = "hatchling.build"
|
|
16
|
+
|
|
17
|
+
[tool.hatch.build.targets.wheel]
|
|
18
|
+
packages = ["src/amfs"]
|
|
@@ -0,0 +1,41 @@
|
|
|
1
|
+
"""AMFS Python SDK — Agent Memory File System."""
|
|
2
|
+
|
|
3
|
+
from amfs_core.embedder import EmbedderABC
|
|
4
|
+
from amfs_core.models import (
|
|
5
|
+
ConflictPolicy,
|
|
6
|
+
DecisionTrace,
|
|
7
|
+
Event,
|
|
8
|
+
MemoryEntry,
|
|
9
|
+
MemoryStats,
|
|
10
|
+
MemoryType,
|
|
11
|
+
OutcomeRecord,
|
|
12
|
+
OutcomeType,
|
|
13
|
+
Provenance,
|
|
14
|
+
ProvenanceTier,
|
|
15
|
+
RecallConfig,
|
|
16
|
+
ScoredEntry,
|
|
17
|
+
SearchQuery,
|
|
18
|
+
SemanticQuery,
|
|
19
|
+
)
|
|
20
|
+
|
|
21
|
+
from amfs.memory import AgentMemory, MemoryScope
|
|
22
|
+
|
|
23
|
+
__all__ = [
|
|
24
|
+
"AgentMemory",
|
|
25
|
+
"ConflictPolicy",
|
|
26
|
+
"DecisionTrace",
|
|
27
|
+
"EmbedderABC",
|
|
28
|
+
"Event",
|
|
29
|
+
"MemoryEntry",
|
|
30
|
+
"MemoryScope",
|
|
31
|
+
"MemoryStats",
|
|
32
|
+
"MemoryType",
|
|
33
|
+
"OutcomeRecord",
|
|
34
|
+
"OutcomeType",
|
|
35
|
+
"Provenance",
|
|
36
|
+
"ProvenanceTier",
|
|
37
|
+
"RecallConfig",
|
|
38
|
+
"ScoredEntry",
|
|
39
|
+
"SearchQuery",
|
|
40
|
+
"SemanticQuery",
|
|
41
|
+
]
|
|
@@ -0,0 +1,59 @@
|
|
|
1
|
+
"""YAML configuration loader for AMFS."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from pathlib import Path
|
|
6
|
+
from typing import Any
|
|
7
|
+
|
|
8
|
+
import yaml
|
|
9
|
+
|
|
10
|
+
from amfs_core.models import AMFSConfig, LayerConfig
|
|
11
|
+
|
|
12
|
+
_DEFAULT_CONFIG_NAMES = ("amfs.yaml", "amfs.yml", ".amfs.yaml", ".amfs.yml")
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
def find_config(start: Path | None = None) -> Path | None:
    """Locate the nearest AMFS config file.

    Starting at *start* (default: the current working directory), each
    ancestor directory is checked in turn for any of the well-known
    config filenames. Returns the first match, or ``None`` when the
    filesystem root is reached without finding one.
    """
    base = (start or Path.cwd()).resolve()
    # ``base.parents`` walks up to (and including) the filesystem root,
    # matching the original parent-equals-self termination condition.
    for folder in (base, *base.parents):
        for filename in _DEFAULT_CONFIG_NAMES:
            hit = folder / filename
            if hit.is_file():
                return hit
    return None
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
def load_config(path: Path) -> AMFSConfig:
    """Parse the YAML file at *path* into a validated ``AMFSConfig``.

    An empty file yields an empty mapping; a missing ``namespace`` key
    falls back to ``"default"`` and a missing ``layers`` key to no layers.
    Each layer must provide an ``adapter`` name; ``options`` defaults to
    an empty dict.
    """
    data: dict[str, Any] = yaml.safe_load(path.read_text(encoding="utf-8")) or {}
    layer_map: dict[str, LayerConfig] = {
        layer_name: LayerConfig(
            adapter=spec["adapter"],
            options=spec.get("options", {}),
        )
        for layer_name, spec in data.get("layers", {}).items()
    }
    return AMFSConfig(
        namespace=data.get("namespace", "default"),
        layers=layer_map,
    )
|
|
41
|
+
|
|
42
|
+
|
|
43
|
+
def load_config_or_default(path: Path | None = None) -> AMFSConfig:
    """Resolve configuration: explicit path, then auto-discovery, then defaults.

    The built-in fallback is a single ``primary`` layer using the
    filesystem adapter rooted at ``.amfs`` under the current working
    directory.
    """
    candidate = path if path is not None else find_config()
    if candidate is not None:
        return load_config(candidate)
    # No config file anywhere up the tree: fall back to a local store.
    default_layer = LayerConfig(adapter="filesystem", options={"root": ".amfs"})
    return AMFSConfig(namespace="default", layers={"primary": default_layer})
|
|
@@ -0,0 +1,69 @@
|
|
|
1
|
+
"""Adapter factory — instantiates adapters from AMFSConfig."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from pathlib import Path
|
|
6
|
+
|
|
7
|
+
from amfs_core.abc import AdapterABC
|
|
8
|
+
from amfs_core.models import AMFSConfig, LayerConfig
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
_ADAPTER_REGISTRY: dict[str, type] = {}
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
def register_adapter(name: str, cls: type) -> None:
    """Make *cls* available to the factory under the key *name*.

    Registering the same name again replaces the previous class.
    """
    _ADAPTER_REGISTRY.update({name: cls})
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
def _ensure_builtins() -> None:
    """Lazily register built-in adapters.

    Called before registry lookups so the import cost of adapter packages
    is only paid on first use. The filesystem adapter is imported
    unconditionally (hard dependency); postgres and s3 are optional and
    their ImportErrors are swallowed.
    """
    if "filesystem" not in _ADAPTER_REGISTRY:
        from amfs_filesystem.adapter import FilesystemAdapter

        register_adapter("filesystem", FilesystemAdapter)

    if "postgres" not in _ADAPTER_REGISTRY:
        try:
            from amfs_postgres.adapter import PostgresAdapter

            register_adapter("postgres", PostgresAdapter)
        except ImportError:
            pass  # psycopg not installed

    if "s3" not in _ADAPTER_REGISTRY:
        try:
            from amfs_s3.adapter import S3Adapter

            register_adapter("s3", S3Adapter)
        except ImportError:
            pass  # boto3 not installed
|
|
41
|
+
|
|
42
|
+
|
|
43
|
+
def create_adapter(layer: LayerConfig, namespace: str) -> AdapterABC:
    """Instantiate the adapter class named by *layer*, passing its options.

    Raises ``ValueError`` when the adapter name is not registered.
    The filesystem adapter's ``root`` option is coerced to a ``Path``.
    """
    _ensure_builtins()
    if (adapter_cls := _ADAPTER_REGISTRY.get(layer.adapter)) is None:
        raise ValueError(
            f"Unknown adapter '{layer.adapter}'. "
            f"Available: {sorted(_ADAPTER_REGISTRY.keys())}"
        )
    kwargs = dict(layer.options)
    # YAML gives us a string path; the filesystem adapter wants a Path.
    if layer.adapter == "filesystem" and "root" in kwargs:
        kwargs["root"] = Path(kwargs["root"])
    return adapter_cls(namespace=namespace, **kwargs)
|
|
57
|
+
|
|
58
|
+
|
|
59
|
+
def create_adapter_from_config(
    config: AMFSConfig, *, layer_name: str = "primary"
) -> AdapterABC:
    """Build the adapter for *layer_name* out of a full *config*.

    Raises ``KeyError`` when the named layer is absent, listing the
    layers that do exist.
    """
    if (layer := config.layers.get(layer_name)) is None:
        raise KeyError(
            f"Layer '{layer_name}' not found in config. "
            f"Available: {sorted(config.layers.keys())}"
        )
    return create_adapter(layer, config.namespace)
|
|
@@ -0,0 +1,1151 @@
|
|
|
1
|
+
"""AgentMemory — the main SDK entry point for agents."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import logging
|
|
6
|
+
import math
|
|
7
|
+
from collections import defaultdict
|
|
8
|
+
from datetime import datetime, timezone
|
|
9
|
+
from pathlib import Path
|
|
10
|
+
from typing import Any, Callable
|
|
11
|
+
|
|
12
|
+
from amfs_core.abc import AdapterABC, WatchHandle
|
|
13
|
+
from amfs_core.embedder import EmbedderABC
|
|
14
|
+
from amfs_core.engine import CausalTagger, CoWEngine, ReadTracker
|
|
15
|
+
from amfs_core.exceptions import StaleWriteError
|
|
16
|
+
from amfs_core.lifecycle import LifecycleManager
|
|
17
|
+
from amfs_core.models import (
|
|
18
|
+
ConflictPolicy,
|
|
19
|
+
DecisionTrace,
|
|
20
|
+
ErrorEvent,
|
|
21
|
+
Event,
|
|
22
|
+
EventType,
|
|
23
|
+
ExternalContext,
|
|
24
|
+
GraphEdge,
|
|
25
|
+
GraphNeighborQuery,
|
|
26
|
+
MemoryEntry,
|
|
27
|
+
MemoryStateDiff,
|
|
28
|
+
MemoryStats,
|
|
29
|
+
MemoryType,
|
|
30
|
+
OutcomeType,
|
|
31
|
+
QueryEvent,
|
|
32
|
+
RecallConfig,
|
|
33
|
+
ScopeInfo,
|
|
34
|
+
ScoredEntry,
|
|
35
|
+
SearchQuery,
|
|
36
|
+
SemanticQuery,
|
|
37
|
+
TraceEntry,
|
|
38
|
+
)
|
|
39
|
+
from amfs_core.outcome import OutcomeBackPropagator
|
|
40
|
+
|
|
41
|
+
from amfs.config import load_config_or_default
|
|
42
|
+
from amfs.factory import create_adapter_from_config
|
|
43
|
+
|
|
44
|
+
logger = logging.getLogger(__name__)
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
class AgentMemory:
|
|
48
|
+
"""High-level API for agents to read, write, and observe shared memory.
|
|
49
|
+
|
|
50
|
+
Features:
|
|
51
|
+
|
|
52
|
+
- **Auto-causal tracking**: every ``read()`` is logged. ``commit_outcome()``
|
|
53
|
+
auto-links to everything this session read.
|
|
54
|
+
- **Confidence decay**: stale entries lose effective confidence over time.
|
|
55
|
+
- **Rich search**: filter by confidence, agent, recency, pattern refs.
|
|
56
|
+
- **Semantic search**: find entries by meaning using pluggable embedders.
|
|
57
|
+
- **Conflict detection**: detect when another agent modified an entry
|
|
58
|
+
since your last read.
|
|
59
|
+
- **Memory stats**: aggregate introspection for debugging and UIs.
|
|
60
|
+
|
|
61
|
+
Usage::
|
|
62
|
+
|
|
63
|
+
with AgentMemory(agent_id="review-agent") as mem:
|
|
64
|
+
mem.write("checkout-service", "retry-pattern", {"max_retries": 3})
|
|
65
|
+
entry = mem.read("checkout-service", "retry-pattern")
|
|
66
|
+
mem.commit_outcome("INC-001", OutcomeType.P1_INCIDENT)
|
|
67
|
+
"""
|
|
68
|
+
|
|
69
|
+
    def __init__(
        self,
        agent_id: str,
        *,
        session_id: str | None = None,
        config_path: Path | None = None,
        adapter: AdapterABC | None = None,
        ttl_sweep_interval: float | None = None,
        decay_half_life_days: float | None = None,
        embedder: EmbedderABC | None = None,
        conflict_policy: ConflictPolicy = ConflictPolicy.LAST_WRITE_WINS,
        on_conflict: Callable[[MemoryEntry, MemoryEntry, Any], Any] | None = None,
        importance_evaluator: Any | None = None,
    ) -> None:
        """Wire up the memory stack for one agent session.

        An explicit *adapter* bypasses config-driven adapter creation
        (the config is still loaded for its namespace). When
        *ttl_sweep_interval* is given, a background lifecycle manager is
        started immediately to expire entries. *decay_half_life_days*
        enables confidence decay in ``read()``; *embedder* enables
        semantic search and write-time embedding.
        """
        self._config = load_config_or_default(config_path)

        # An injected adapter wins over whatever the config describes.
        if adapter is not None:
            self._adapter = adapter
        else:
            self._adapter = create_adapter_from_config(self._config)

        self._tagger = CausalTagger(agent_id, session_id)
        self._read_tracker = ReadTracker()
        self._engine = CoWEngine(self._adapter, self._tagger, self._read_tracker)
        self._propagator = OutcomeBackPropagator(self._adapter)
        self._decay_half_life_days = decay_half_life_days
        self._embedder = embedder
        self._conflict_policy = conflict_policy
        self._on_conflict = on_conflict
        self._importance_evaluator = importance_evaluator
        # All operations default to the "main" branch unless overridden per call.
        self._branch = "main"

        # TTL sweeping is opt-in; the manager thread starts right away.
        self._lifecycle: LifecycleManager | None = None
        if ttl_sweep_interval is not None:
            self._lifecycle = LifecycleManager(self._adapter, interval=ttl_sweep_interval)
            self._lifecycle.start()

        self._adapter.ensure_agent(self.agent_id, self._config.namespace)
|
|
107
|
+
|
|
108
|
+
# ------------------------------------------------------------------
|
|
109
|
+
# Properties
|
|
110
|
+
# ------------------------------------------------------------------
|
|
111
|
+
|
|
112
|
+
    @property
    def agent_id(self) -> str:
        """Identifier of this agent, as held by the causal tagger."""
        return self._tagger.agent_id
|
|
115
|
+
|
|
116
|
+
    @property
    def session_id(self) -> str:
        """Identifier of the current session, as held by the causal tagger."""
        return self._tagger.session_id
|
|
119
|
+
|
|
120
|
+
    @property
    def namespace(self) -> str:
        """Namespace from the loaded configuration."""
        return self._config.namespace
|
|
123
|
+
|
|
124
|
+
    @property
    def adapter(self) -> AdapterABC:
        """The underlying storage adapter (escape hatch for advanced use)."""
        return self._adapter
|
|
127
|
+
|
|
128
|
+
    @property
    def read_log(self) -> list[str]:
        """Entry keys read during this session (for inspection/debugging)."""
        return self._read_tracker.causal_keys
|
|
132
|
+
|
|
133
|
+
# ------------------------------------------------------------------
|
|
134
|
+
# Core operations
|
|
135
|
+
# ------------------------------------------------------------------
|
|
136
|
+
|
|
137
|
+
def read(
|
|
138
|
+
self,
|
|
139
|
+
entity_path: str,
|
|
140
|
+
key: str,
|
|
141
|
+
*,
|
|
142
|
+
min_confidence: float = 0.0,
|
|
143
|
+
branch: str | None = None,
|
|
144
|
+
) -> MemoryEntry | None:
|
|
145
|
+
"""Read the current version of a key.
|
|
146
|
+
|
|
147
|
+
Automatically tracked for causal linking and conflict detection.
|
|
148
|
+
If *decay_half_life_days* is set, applies confidence decay before
|
|
149
|
+
the min_confidence check. Private entries from other agents are
|
|
150
|
+
not visible — use ``recall()`` to access your own private entries.
|
|
151
|
+
"""
|
|
152
|
+
import time
|
|
153
|
+
|
|
154
|
+
effective_branch = branch or self._branch
|
|
155
|
+
start = time.monotonic()
|
|
156
|
+
try:
|
|
157
|
+
if self._decay_half_life_days is not None:
|
|
158
|
+
entry = self._engine.read(entity_path, key, min_confidence=0.0, branch=effective_branch)
|
|
159
|
+
if entry is None:
|
|
160
|
+
return None
|
|
161
|
+
if not entry.shared and entry.provenance.agent_id != self.agent_id:
|
|
162
|
+
return None
|
|
163
|
+
effective = entry.effective_confidence(
|
|
164
|
+
decay_half_life_days=self._decay_half_life_days,
|
|
165
|
+
)
|
|
166
|
+
if effective < min_confidence:
|
|
167
|
+
return None
|
|
168
|
+
return entry
|
|
169
|
+
entry = self._engine.read(entity_path, key, min_confidence=min_confidence, branch=effective_branch)
|
|
170
|
+
if entry is not None and not entry.shared and entry.provenance.agent_id != self.agent_id:
|
|
171
|
+
return None
|
|
172
|
+
return entry
|
|
173
|
+
except Exception as exc:
|
|
174
|
+
self._read_tracker.record_error(
|
|
175
|
+
"read", type(exc).__name__, str(exc),
|
|
176
|
+
)
|
|
177
|
+
raise
|
|
178
|
+
|
|
179
|
+
    def write(
        self,
        entity_path: str,
        key: str,
        value: Any,
        *,
        confidence: float = 1.0,
        ttl_at: datetime | None = None,
        pattern_refs: list[str] | None = None,
        memory_type: MemoryType = MemoryType.FACT,
        artifact_refs: list | None = None,
        shared: bool = True,
        branch: str | None = None,
    ) -> MemoryEntry:
        """Write a new version of a key with automatic provenance.

        If *conflict_policy* is ``RAISE``, checks whether the entry was
        modified by another agent since our last read and raises
        ``StaleWriteError`` if so. If an ``on_conflict`` callback is set,
        it is called with ``(our_last_read, current_entry, new_value)``
        and should return the merged value to write.
        """
        effective_branch = branch or self._branch
        entry_key = f"{entity_path}/{key}"
        read_version = self._read_tracker.read_version(entry_key)

        # Conflict detection only applies when we read this key earlier
        # in the session — otherwise there is no baseline to compare to.
        if read_version is not None:
            current = self._adapter.read(entity_path, key)
            if (
                current is not None
                and current.version > read_version
                and current.provenance.agent_id != self.agent_id
            ):
                if self._on_conflict is not None:
                    # NOTE(review): the first argument is a copy of *current*,
                    # not the version we originally read, even though the
                    # docstring promises ``our_last_read`` — confirm intent.
                    value = self._on_conflict(
                        current.model_copy(),
                        current,
                        value,
                    )
                    logger.info(
                        "Conflict on %s resolved by on_conflict callback",
                        entry_key,
                    )
                elif self._conflict_policy == ConflictPolicy.RAISE:
                    raise StaleWriteError(
                        entity_path,
                        key,
                        read_version,
                        current.version,
                        current.provenance.agent_id,
                    )
                # LAST_WRITE_WINS (and no callback): fall through and overwrite.

        # Optional write-time enrichment: embedding vector for semantic
        # search, plus an importance score if an evaluator was supplied.
        embedding = None
        if self._embedder is not None:
            embedding = self._embedder.embed_value(value)

        importance_score = None
        importance_dimensions = None
        if self._importance_evaluator is not None:
            try:
                importance_score, importance_dimensions = self._importance_evaluator.evaluate(
                    value,
                    entity_path=entity_path,
                    key=key,
                )
            except Exception:
                # Importance scoring is best-effort; never block the write.
                logger.debug("Importance evaluation failed, skipping", exc_info=True)

        entry = self._engine.write(
            entity_path,
            key,
            value,
            confidence=confidence,
            ttl_at=ttl_at,
            pattern_refs=pattern_refs,
            memory_type=memory_type,
            artifact_refs=artifact_refs,
            shared=shared,
            branch=effective_branch,
            embedding=embedding,
            importance_score=importance_score,
            importance_dimensions=importance_dimensions or None,
        )
        # version == 1 marks a brand-new entry for the session state diff.
        self._read_tracker.record_write(entity_path, key, entry.version, entry.version == 1)

        # Event logging is best-effort; a failure must not fail the write.
        try:
            self._adapter.log_event(Event(
                namespace=self.namespace,
                agent_id=self.agent_id,
                branch=effective_branch,
                event_type=EventType.WRITE,
                summary=f"Wrote {entity_path}/{key} v{entry.version}",
                details={
                    "entity_path": entity_path,
                    "key": key,
                    "version": entry.version,
                    "confidence": entry.confidence,
                    "memory_type": entry.memory_type.value if hasattr(entry.memory_type, "value") else str(entry.memory_type),
                    "shared": entry.shared,
                },
            ))
        except Exception:
            logger.debug("Failed to log write event", exc_info=True)

        # Mirror pattern references into the knowledge graph (best-effort).
        if pattern_refs:
            self._materialize_pattern_ref_edges(
                entity_path, key, pattern_refs, effective_branch,
            )

        return entry
|
|
289
|
+
|
|
290
|
+
    def _materialize_pattern_ref_edges(
        self,
        entity_path: str,
        key: str,
        pattern_refs: list[str],
        branch: str,
    ) -> None:
        """Best-effort: create graph edges from pattern_refs.

        For every reference, upserts an ``entry --references--> entry``
        edge in the knowledge graph. Failures are logged at debug level
        and never propagate — graph materialization must not break writes.
        """
        for ref in pattern_refs:
            try:
                self._adapter.upsert_graph_edge(
                    GraphEdge(
                        source_entity=f"{entity_path}/{key}",
                        source_type="entry",
                        relation="references",
                        target_entity=ref,
                        target_type="entry",
                        provenance={"agent_id": self.agent_id, "trigger": "write"},
                    ),
                    namespace=self.namespace,
                    branch=branch,
                )
            except Exception:
                logger.debug("Failed to materialize pattern_ref edge for %s", ref, exc_info=True)
|
|
314
|
+
|
|
315
|
+
def list(
|
|
316
|
+
self,
|
|
317
|
+
entity_path: str | None = None,
|
|
318
|
+
*,
|
|
319
|
+
include_superseded: bool = False,
|
|
320
|
+
branch: str | None = None,
|
|
321
|
+
) -> list[MemoryEntry]:
|
|
322
|
+
"""List current entries, optionally filtered to an entity path.
|
|
323
|
+
|
|
324
|
+
Private entries from other agents are excluded.
|
|
325
|
+
"""
|
|
326
|
+
import time
|
|
327
|
+
effective_branch = branch or self._branch
|
|
328
|
+
start = time.monotonic()
|
|
329
|
+
results = self._engine.list(entity_path, include_superseded=include_superseded, branch=effective_branch)
|
|
330
|
+
results = [
|
|
331
|
+
e for e in results
|
|
332
|
+
if e.shared or e.provenance.agent_id == self.agent_id
|
|
333
|
+
]
|
|
334
|
+
duration = (time.monotonic() - start) * 1000
|
|
335
|
+
self._read_tracker.record_query(
|
|
336
|
+
"list",
|
|
337
|
+
{"entity_path": entity_path, "include_superseded": include_superseded},
|
|
338
|
+
len(results),
|
|
339
|
+
duration,
|
|
340
|
+
)
|
|
341
|
+
return results
|
|
342
|
+
|
|
343
|
+
def watch(
|
|
344
|
+
self,
|
|
345
|
+
entity_path: str,
|
|
346
|
+
callback: Any,
|
|
347
|
+
) -> WatchHandle:
|
|
348
|
+
"""Watch for writes to any key under an entity path."""
|
|
349
|
+
return self._adapter.watch(entity_path, callback)
|
|
350
|
+
|
|
351
|
+
# ------------------------------------------------------------------
|
|
352
|
+
# Search & Stats
|
|
353
|
+
# ------------------------------------------------------------------
|
|
354
|
+
|
|
355
|
+
    def search(
        self,
        *,
        query: str | None = None,
        entity_path: str | None = None,
        entity_paths: list[str] | None = None,
        min_confidence: float = 0.0,
        max_confidence: float | None = None,
        agent_id: str | None = None,
        since: datetime | None = None,
        pattern_ref: str | None = None,
        limit: int = 100,
        sort_by: str = "confidence",
        recall_config: RecallConfig | None = None,
        depth: int = 3,
    ) -> list[MemoryEntry] | list[ScoredEntry]:
        """Search across all entities with rich filters.

        When *query* is provided the text is forwarded to the adapter for
        full-text search (Postgres tsvector) and, when *recall_config* is
        also set, used for cosine-similarity scoring against entry embeddings.

        When *entity_paths* is provided, runs a search for each path and merges
        the results. *entity_path* (singular) is still supported for backwards
        compatibility; if both are given, *entity_paths* takes precedence.

        When *recall_config* is provided, returns ``ScoredEntry`` objects
        sorted by composite recall score instead.

        *depth* controls progressive retrieval across memory tiers:
        1 = HOT only, 2 = HOT + WARM, 3 = all tiers (default).
        """
        from amfs_core.embedder import cosine_similarity

        # A single None path means "search everywhere".
        paths = entity_paths or ([entity_path] if entity_path else [None])

        # Merge per-path results, deduplicating by entry_key and dropping
        # other agents' private entries.
        seen_keys: set[str] = set()
        merged: list[MemoryEntry] = []
        for ep in paths:
            sq = SearchQuery(
                query=query,
                entity_path=ep,
                min_confidence=min_confidence,
                max_confidence=max_confidence,
                agent_id=agent_id,
                since=since,
                pattern_ref=pattern_ref,
                limit=limit,
                sort_by=sort_by,
                recall_config=recall_config,
                depth=depth,
            )
            for entry in self._adapter.search(sq):
                if entry.entry_key not in seen_keys:
                    if not entry.shared and entry.provenance.agent_id != self.agent_id:
                        continue
                    seen_keys.add(entry.entry_key)
                    merged.append(entry)

        # Re-sort the merged set (per-path ordering is lost by merging),
        # then re-apply the overall limit.
        sort_key: Callable[[MemoryEntry], Any]
        if sort_by == "recency":
            sort_key = lambda e: e.provenance.written_at
        elif sort_by == "version":
            sort_key = lambda e: e.version
        else:
            sort_key = lambda e: e.confidence
        merged.sort(key=sort_key, reverse=True)
        entries = merged[:limit]

        self._read_tracker.record_query(
            "search",
            {"entity_path": entity_path, "min_confidence": min_confidence, "limit": limit, "sort_by": sort_by},
            len(entries),
        )

        if recall_config is None:
            return entries

        # Recall scoring: combine semantic similarity, recency decay, and
        # stored confidence with the configured weights.
        query_vec: list[float] | None = None
        if query and self._embedder is not None:
            query_vec = self._embedder.embed(query)

        now = datetime.now(timezone.utc)
        scored: list[ScoredEntry] = []
        for entry in entries:
            age = now - entry.provenance.written_at
            age_days = age.total_seconds() / 86400.0
            half_life = recall_config.recency_half_life_days

            # Exponential half-life decay; a non-positive half-life
            # disables the recency contribution entirely.
            recency_score = math.exp(-math.log(2) * age_days / half_life) if half_life > 0 else 0.0
            confidence_score = max(0.0, min(1.0, entry.confidence))

            # Similarity only contributes when both a query vector and a
            # stored embedding exist; negative cosine is clamped to zero.
            semantic_score = 0.0
            if query_vec is not None and entry.embedding is not None:
                semantic_score = max(0.0, cosine_similarity(query_vec, entry.embedding))

            composite = (
                recall_config.semantic_weight * semantic_score
                + recall_config.recency_weight * recency_score
                + recall_config.confidence_weight * confidence_score
            )
            scored.append(ScoredEntry(
                entry=entry,
                score=composite,
                breakdown={
                    "semantic": recall_config.semantic_weight * semantic_score,
                    "recency": recall_config.recency_weight * recency_score,
                    "confidence": recall_config.confidence_weight * confidence_score,
                },
            ))

        scored.sort(key=lambda s: s.score, reverse=True)
        return scored
|
|
468
|
+
|
|
469
|
+
def semantic_search(
|
|
470
|
+
self,
|
|
471
|
+
text: str,
|
|
472
|
+
*,
|
|
473
|
+
entity_path: str | None = None,
|
|
474
|
+
min_confidence: float = 0.0,
|
|
475
|
+
limit: int = 10,
|
|
476
|
+
min_similarity: float = 0.0,
|
|
477
|
+
) -> list[tuple[MemoryEntry, float]]:
|
|
478
|
+
"""Search entries by meaning. Requires an embedder to be configured.
|
|
479
|
+
|
|
480
|
+
Returns ``(entry, similarity_score)`` tuples sorted by similarity.
|
|
481
|
+
"""
|
|
482
|
+
if self._embedder is None:
|
|
483
|
+
raise RuntimeError(
|
|
484
|
+
"semantic_search() requires an embedder. "
|
|
485
|
+
"Pass embedder= to AgentMemory()."
|
|
486
|
+
)
|
|
487
|
+
query = SemanticQuery(
|
|
488
|
+
text=text,
|
|
489
|
+
entity_path=entity_path,
|
|
490
|
+
min_confidence=min_confidence,
|
|
491
|
+
limit=limit,
|
|
492
|
+
min_similarity=min_similarity,
|
|
493
|
+
)
|
|
494
|
+
return self._adapter.semantic_search(query, self._embedder)
|
|
495
|
+
|
|
496
|
+
    def stats(self) -> MemoryStats:
        """Aggregate statistics about current memory state (adapter-computed)."""
        return self._adapter.stats()
|
|
499
|
+
|
|
500
|
+
# ------------------------------------------------------------------
|
|
501
|
+
# Knowledge graph
|
|
502
|
+
# ------------------------------------------------------------------
|
|
503
|
+
|
|
504
|
+
def graph_neighbors(
|
|
505
|
+
self,
|
|
506
|
+
entity: str,
|
|
507
|
+
*,
|
|
508
|
+
relation: str | None = None,
|
|
509
|
+
direction: str = "both",
|
|
510
|
+
min_confidence: float = 0.0,
|
|
511
|
+
depth: int = 1,
|
|
512
|
+
limit: int = 50,
|
|
513
|
+
) -> list[GraphEdge]:
|
|
514
|
+
"""Traverse the knowledge graph from an entity."""
|
|
515
|
+
query = GraphNeighborQuery(
|
|
516
|
+
entity=entity,
|
|
517
|
+
relation=relation,
|
|
518
|
+
direction=direction,
|
|
519
|
+
min_confidence=min_confidence,
|
|
520
|
+
depth=depth,
|
|
521
|
+
limit=limit,
|
|
522
|
+
)
|
|
523
|
+
return self._adapter.graph_neighbors(
|
|
524
|
+
query,
|
|
525
|
+
namespace=self.namespace,
|
|
526
|
+
branch=self._branch,
|
|
527
|
+
)
|
|
528
|
+
|
|
529
|
+
# ------------------------------------------------------------------
|
|
530
|
+
# Outcomes
|
|
531
|
+
# ------------------------------------------------------------------
|
|
532
|
+
|
|
533
|
+
    def commit_outcome(
        self,
        outcome_ref: str,
        outcome_type: OutcomeType,
        causal_entry_keys: list[str] | None = None,
        *,
        causal_confidence: float = 1.0,
        decision_summary: str | None = None,
    ) -> list[MemoryEntry]:
        """Record an outcome and back-propagate confidence changes.

        If *causal_entry_keys* is ``None``, automatically uses the session's
        read log — every entry this agent read becomes a causal link.

        Also persists a full ``DecisionTrace`` (causal entries, external
        contexts, query/error events, session timing, state diff), logs an
        OUTCOME event, and materializes causal graph edges — all three of
        those side effects are best-effort.

        Returns the entries whose confidence the propagator updated.
        """
        if causal_entry_keys is None:
            causal_entry_keys = self._read_tracker.causal_keys
        record = OutcomeBackPropagator.make_record(
            outcome_ref=outcome_ref,
            outcome_type=outcome_type,
            causal_entry_keys=causal_entry_keys,
            agent_id=self.agent_id,
            causal_confidence=causal_confidence,
        )
        updated = self._propagator.propagate(record)

        # Build TraceEntry records for every causal key that still resolves.
        # Keys are "<entity_path>/<key>"; anything without a slash is skipped.
        causal_trace_entries: list[TraceEntry] = []
        for ek in causal_entry_keys:
            parts = ek.rsplit("/", 1)
            if len(parts) != 2:
                continue
            ep, k = parts
            entry = self._adapter.read(ep, k)
            if entry:
                snapshot = self._read_tracker.entry_snapshot(ek)
                causal_trace_entries.append(TraceEntry(
                    entity_path=ep, key=k,
                    # Prefer the version we actually read; fall back to current.
                    version=self._read_tracker.read_version(ek) or entry.version,
                    confidence=entry.confidence,
                    value=snapshot.get("value") if snapshot else None,
                    memory_type=snapshot.get("memory_type") if snapshot else None,
                    written_by=snapshot.get("written_by") if snapshot else None,
                    # NOTE(review): reaches into ReadTracker's private
                    # ``_reads`` mapping — consider a public accessor.
                    read_at=self._read_tracker._reads.get(ek),
                ))

        ext_contexts = [
            ExternalContext(
                label=c.get("label", ""),
                summary=c.get("summary", ""),
                source=c.get("source"),
                recorded_at=datetime.fromisoformat(c["recorded_at"]) if c.get("recorded_at") else datetime.now(timezone.utc),
            )
            for c in self._read_tracker.external_contexts
        ]

        now = datetime.now(timezone.utc)
        query_events = [
            QueryEvent(
                operation=q.get("operation", ""),
                parameters=q.get("parameters", {}),
                result_count=q.get("result_count", 0),
                duration_ms=q.get("duration_ms"),
                occurred_at=datetime.fromisoformat(q["occurred_at"]) if q.get("occurred_at") else now,
            )
            for q in self._read_tracker.query_events
        ]

        session_started = self._read_tracker.session_started_at
        session_duration = (now - session_started).total_seconds() * 1000

        error_events = [
            ErrorEvent(
                operation=e.get("operation", ""),
                error_type=e.get("error_type", ""),
                message=e.get("message", ""),
                stack_trace=e.get("stack_trace"),
                occurred_at=datetime.fromisoformat(e["occurred_at"]) if e.get("occurred_at") else now,
            )
            for e in self._read_tracker.error_events
        ]

        # "is_new" is set by record_write() when the entry was version 1.
        writes = self._read_tracker.write_events
        state_diff = MemoryStateDiff(
            entries_created=sum(1 for w in writes if w.get("is_new")),
            entries_updated=sum(1 for w in writes if not w.get("is_new")),
        )

        trace = DecisionTrace(
            agent_id=self.agent_id,
            session_id=self.session_id,
            outcome_ref=outcome_ref,
            outcome_type=outcome_type.value,
            decision_summary=decision_summary,
            causal_entries=causal_trace_entries,
            external_contexts=ext_contexts,
            query_events=query_events,
            session_started_at=session_started,
            session_ended_at=now,
            session_duration_ms=session_duration,
            error_events=error_events,
            state_diff=state_diff,
        )
        # Trace persistence is best-effort: the outcome itself already
        # propagated above and must not be undone by trace failures.
        try:
            self._adapter.save_trace(trace)
        except Exception:
            logger.debug("Failed to persist decision trace", exc_info=True)

        try:
            self._adapter.log_event(Event(
                namespace=self.namespace,
                agent_id=self.agent_id,
                branch=self._branch,
                event_type=EventType.OUTCOME,
                summary=f"Committed outcome '{outcome_ref}' ({outcome_type.value})",
                details={
                    "outcome_ref": outcome_ref,
                    "outcome_type": outcome_type.value,
                    "causal_entries": len(causal_entry_keys),
                    "entries_updated": len(updated),
                },
            ))
        except Exception:
            logger.debug("Failed to log outcome event", exc_info=True)

        self._materialize_causal_edges(outcome_ref, outcome_type, causal_entry_keys)

        return updated
|
|
659
|
+
|
|
660
|
+
def _materialize_causal_edges(
    self,
    outcome_ref: str,
    outcome_type: OutcomeType,
    causal_entry_keys: list[str],
) -> None:
    """Best-effort: create graph edges from the causal chain.

    For every entry key that informed the outcome, upserts an
    entry→outcome "informed" edge and an agent→entry "read" edge, then
    connects every pair of causal entries with a "co_occurs_with" edge.
    All upserts are best-effort: failures are logged at debug level and
    never raised to the caller.
    """
    # Successful outcomes get full-confidence edges; everything else 0.7.
    edge_conf = 1.0 if outcome_type in (OutcomeType.SUCCESS,) else 0.7
    branch = self._branch

    def _edge(src: str, src_type: str, rel: str, dst: str, dst_type: str) -> GraphEdge:
        # Build a fresh provenance dict per edge so adapters may mutate it safely.
        return GraphEdge(
            source_entity=src,
            source_type=src_type,
            relation=rel,
            target_entity=dst,
            target_type=dst_type,
            confidence=edge_conf,
            provenance={"agent_id": self.agent_id, "trigger": "commit_outcome"},
        )

    def _upsert(edge: GraphEdge) -> None:
        self._adapter.upsert_graph_edge(edge, namespace=self.namespace, branch=branch)

    for ek in causal_entry_keys:
        try:
            _upsert(_edge(ek, "entry", "informed", outcome_ref, "outcome"))
            _upsert(_edge(self.agent_id, "agent", "read", ek, "entry"))
        except Exception:
            logger.debug("Failed to materialize causal edge for %s", ek, exc_info=True)

    # Pairwise co-occurrence edges between all causal entries.
    for idx, first in enumerate(causal_entry_keys):
        for second in causal_entry_keys[idx + 1:]:
            try:
                _upsert(_edge(first, "entry", "co_occurs_with", second, "entry"))
            except Exception:
                logger.debug("Failed to materialize co-occurrence edge", exc_info=True)
|
|
718
|
+
|
|
719
|
+
# ------------------------------------------------------------------
|
|
720
|
+
# Temporal & Explainability
|
|
721
|
+
# ------------------------------------------------------------------
|
|
722
|
+
|
|
723
|
+
def history(
    self,
    entity_path: str,
    key: str,
    *,
    since: datetime | None = None,
    until: datetime | None = None,
) -> list[MemoryEntry]:
    """Return every stored version of *key* under *entity_path*, ordered by version.

    Supports temporal questions like "how did this memory change over
    time?". Each returned entry is a copy-on-write snapshot carrying the
    confidence and provenance it had when written. *since*/*until*
    optionally bound the time window.
    """
    # Delegate to the engine, pinned to the currently active branch.
    return self._engine.history(
        entity_path,
        key,
        since=since,
        until=until,
        branch=self._branch,
    )
|
|
738
|
+
|
|
739
|
+
def record_context(
    self,
    label: str,
    summary: str,
    *,
    source: str | None = None,
) -> None:
    """Add external context to the causal chain without touching storage.

    Use this after consulting an outside tool, API, or data source so
    ``explain()`` reflects the full picture — not only which AMFS
    entries were read, but also which external inputs shaped the
    agent's decision.

    Example::

        mem.record_context(
            "pagerduty-incidents",
            "3 SEV-1 incidents in the last 24h for checkout-service",
            source="PagerDuty API",
        )
    """
    self._read_tracker.record_context(label, summary, source=source)
|
|
762
|
+
|
|
763
|
+
def explain(self, outcome_ref: str | None = None) -> dict[str, Any]:
    """Return the causal chain for the current session (or a named outcome).

    Reports which memories were read before the outcome, in read order,
    together with any external contexts recorded via ``record_context()``.
    This is grounded explainability: which stored knowledge and external
    inputs actually drove the decision, not what an LLM inferred.
    Embeddings are stripped from the returned entry payloads.
    """
    causal_keys = self._read_tracker.causal_keys
    resolved: list[dict[str, Any]] = []
    for ek in causal_keys:
        # Keys are "<entity_path>/<key>"; skip anything malformed.
        if "/" not in ek:
            continue
        ep, _, k = ek.rpartition("/")
        entry = self._adapter.read(ep, k)
        if not entry:
            continue
        payload = entry.model_dump(mode="json")
        payload.pop("embedding", None)  # embeddings are bulky and not explanatory
        payload["read_version"] = self._read_tracker.read_version(ek)
        resolved.append(payload)
    return {
        "outcome_ref": outcome_ref,
        "agent_id": self.agent_id,
        "session_id": self.session_id,
        "causal_chain_length": len(causal_keys),
        "causal_entries": resolved,
        "external_contexts": self._read_tracker.external_contexts,
    }
|
|
793
|
+
|
|
794
|
+
# ------------------------------------------------------------------
|
|
795
|
+
# Agent brain — scoped recall & cross-agent reads
|
|
796
|
+
# ------------------------------------------------------------------
|
|
797
|
+
|
|
798
|
+
def recall(
    self,
    entity_path: str,
    key: str,
    *,
    min_confidence: float = 0.0,
) -> MemoryEntry | None:
    """Recall this agent's own memory for a key.

    Where ``read()`` returns the latest version by any agent,
    ``recall()`` only returns versions this agent wrote itself — what
    this brain knows from direct experience. If the latest version was
    written by someone else, the version history is scanned from newest
    to oldest for this agent's most recent write.
    """

    def _gate(candidate: MemoryEntry) -> MemoryEntry | None:
        # The confidence threshold is applied after authorship filtering.
        return candidate if candidate.confidence >= min_confidence else None

    # Read without a threshold so a low-confidence own entry is still found.
    latest = self._engine.read(entity_path, key, min_confidence=0.0)
    if latest is None:
        return None
    if latest.provenance.agent_id == self.agent_id:
        return _gate(latest)
    # NOTE(review): unlike history(), this call does not pass
    # branch=self._branch — confirm whether that is intended.
    for version in reversed(self._engine.history(entity_path, key)):
        if version.provenance.agent_id == self.agent_id:
            return _gate(version)
    return None
|
|
822
|
+
|
|
823
|
+
def my_entries(
    self,
    entity_path: str | None = None,
) -> list[MemoryEntry]:
    """Return the entries this agent wrote — the contents of this brain.

    Optionally restrict the listing to a single *entity_path*.
    """
    # A search filtered on our own agent_id is exactly "my" entries.
    return self.search(entity_path=entity_path, agent_id=self.agent_id)
|
|
829
|
+
|
|
830
|
+
def read_from(
    self,
    agent_id: str,
    entity_path: str,
    key: str,
) -> MemoryEntry | None:
    """Read a specific key from another agent's memory.

    Makes cross-agent knowledge transfer explicit and trackable: the
    read joins this session's causal chain, a CROSS_AGENT_READ event is
    logged, and a "learned_from" graph edge is materialized. The event
    and edge are best-effort; failures are logged at debug level.
    """
    candidates = self.search(entity_path=entity_path, agent_id=agent_id)
    entry = next((e for e in candidates if e.key == key), None)
    if entry is None:
        return None
    self._read_tracker.record(entry)

    try:
        self._adapter.log_event(
            Event(
                namespace=self.namespace,
                agent_id=self.agent_id,
                branch=self._branch,
                event_type=EventType.CROSS_AGENT_READ,
                summary=f"Read {entity_path}/{key} from agent '{agent_id}'",
                details={
                    "source_agent_id": agent_id,
                    "entity_path": entity_path,
                    "key": key,
                    "version": entry.version,
                },
            )
        )
    except Exception:
        logger.debug("Failed to log cross-agent read event", exc_info=True)

    try:
        edge = GraphEdge(
            source_entity=self.agent_id,
            source_type="agent",
            relation="learned_from",
            target_entity=agent_id,
            target_type="agent",
            provenance={
                "entity_path": entity_path,
                "key": key,
                "trigger": "read_from",
            },
        )
        self._adapter.upsert_graph_edge(
            edge,
            namespace=self.namespace,
            branch=self._branch,
        )
    except Exception:
        logger.debug("Failed to materialize cross-agent graph edge", exc_info=True)

    return entry
|
|
886
|
+
|
|
887
|
+
# ------------------------------------------------------------------
|
|
888
|
+
# Cross-agent relationships
|
|
889
|
+
# ------------------------------------------------------------------
|
|
890
|
+
|
|
891
|
+
def cross_agent_reads(self) -> dict[str, list[dict[str, Any]]]:
    """Return which other agents' memory this agent has read.

    Walks this agent's decision traces to collect every entry it read,
    then keeps those whose current author is a different agent.

    Returns a dict mapping ``other_agent_id`` to a list of dicts with
    ``entity_path``, ``key``, and ``read_count``. Useful for questions
    like "which agents have I talked to?" and "what memory did I get
    from agent X?".

    Example::

        reads = mem.cross_agent_reads()
        # {'deploy-agent': [{'entity_path': 'checkout-service', 'key': 'retry-pattern', 'read_count': 3}]}
    """
    # Map "entity_path/key" -> authoring agent from the current listing.
    authors = {
        f"{entry.entity_path}/{entry.key}": entry.provenance.agent_id
        for entry in self.list()
    }

    # Count reads per (entity_path, key) across all of our traces.
    read_counts: dict[str, dict[str, int]] = {}
    for trace in self._adapter.list_traces(agent_id=self.agent_id, limit=10000):
        for ce in trace.causal_entries:
            bucket = read_counts.setdefault(ce.entity_path, {})
            bucket[ce.key] = bucket.get(ce.key, 0) + 1

    # Keep only entries authored by somebody other than ourselves.
    result: dict[str, list[dict[str, Any]]] = {}
    for ep, keys in read_counts.items():
        for key, count in keys.items():
            author = authors.get(f"{ep}/{key}")
            if author and author != self.agent_id:
                result.setdefault(author, []).append({
                    "entity_path": ep,
                    "key": key,
                    "read_count": count,
                })
    return result
|
|
939
|
+
|
|
940
|
+
def agents_i_read_from(self) -> list[str]:
    """Return the IDs of other agents whose memory this agent has read.

    Convenience wrapper over :meth:`cross_agent_reads` that keeps just
    the agent IDs, in the same order.
    """
    return [other for other in self.cross_agent_reads()]
|
|
947
|
+
|
|
948
|
+
# ------------------------------------------------------------------
|
|
949
|
+
# Timeline (git log)
|
|
950
|
+
# ------------------------------------------------------------------
|
|
951
|
+
|
|
952
|
+
def timeline(
    self,
    *,
    event_type: str | None = None,
    since: datetime | None = None,
    limit: int = 100,
) -> list[Event]:
    """Return this agent's event timeline (the "git commit log").

    Writes, outcomes, and cross-agent reads are all recorded as events
    automatically; this returns them, newest-capped at *limit*, with
    optional *event_type* and *since* filters.
    """
    # The adapter handles filtering; we only scope it to this agent/namespace.
    return self._adapter.list_events(
        self.agent_id,
        self.namespace,
        event_type=event_type,
        since=since,
        limit=limit,
    )
|
|
972
|
+
|
|
973
|
+
# ------------------------------------------------------------------
|
|
974
|
+
# Briefing (Memory Cortex)
|
|
975
|
+
# ------------------------------------------------------------------
|
|
976
|
+
|
|
977
|
+
def briefing(
    self,
    entity_path: str | None = None,
    agent_id: str | None = None,
    limit: int = 10,
    branch: str | None = None,
) -> list:
    """Get a ranked briefing of compiled knowledge digests.

    Returns pre-compiled Digest objects from the Cortex, ranked by
    relevance to the given entity or agent context. The Cortex package
    is optional: if ``amfs_cortex`` is not installed, an empty list is
    returned instead of raising.

    Args:
        entity_path: Focus on digests relevant to this entity.
        agent_id: Focus on digests relevant to this agent
            (defaults to this agent's own ID).
        limit: Maximum number of digests to return.
        branch: Branch to read digests from (defaults to active branch).

    Returns:
        List of Digest objects ranked by relevance.
    """
    # Local import keeps amfs_cortex an optional dependency.
    try:
        from amfs_cortex.briefing import BriefingService
    except ImportError:
        return []

    service = BriefingService(adapter=self._adapter, namespace=self.namespace)
    return service.briefing(
        entity_path=entity_path,
        agent_id=agent_id or self.agent_id,
        limit=limit,
        branch=branch or self._branch,
    )
|
|
1014
|
+
|
|
1015
|
+
# ------------------------------------------------------------------
|
|
1016
|
+
# Scoped access
|
|
1017
|
+
# ------------------------------------------------------------------
|
|
1018
|
+
|
|
1019
|
+
def scope(self, entity_path: str, *, readonly: bool = False) -> MemoryScope:
    """Create a :class:`MemoryScope` view pinned to *entity_path*.

    When *readonly* is True, writes through the returned scope raise
    :class:`PermissionError`.
    """
    return MemoryScope(self, entity_path, readonly=readonly)
|
|
1022
|
+
|
|
1023
|
+
def list_scopes(self) -> list[str]:
    """Return all unique entity paths that contain at least one entry.

    Paths are returned sorted and de-duplicated.
    """
    seen: set[str] = set()
    for entry in self.list():
        seen.add(entry.entity_path)
    return sorted(seen)
|
|
1027
|
+
|
|
1028
|
+
def info(self, entity_path: str) -> ScopeInfo:
    """Return summary information about a single scope.

    An empty scope yields a zeroed :class:`ScopeInfo`; otherwise the
    result carries entry count, mean confidence, the list of keys, and
    the oldest/newest write timestamps.
    """
    scoped = [entry for entry in self.list() if entry.entity_path == entity_path]
    if not scoped:
        return ScopeInfo(path=entity_path, entry_count=0, avg_confidence=0.0)
    written = [entry.provenance.written_at for entry in scoped]
    total_confidence = sum(entry.confidence for entry in scoped)
    return ScopeInfo(
        path=entity_path,
        entry_count=len(scoped),
        avg_confidence=total_confidence / len(scoped),
        keys=[entry.key for entry in scoped],
        oldest=min(written),
        newest=max(written),
    )
|
|
1042
|
+
|
|
1043
|
+
def tree(self, max_depth: int = 3) -> str:
    """Render all entity paths as an indented tree with entry counts.

    Each line is one path segment; its count aggregates every entry
    stored at or below that segment. Paths deeper than *max_depth* are
    folded into their ancestor's count.

    Example output::

        myapp (5)
         auth (2)
        checkout-service (3)
    """
    entries = self.list()
    # Count entries per exact entity path.
    path_counts: dict[str, int] = defaultdict(int)
    for e in entries:
        path_counts[e.entity_path] += 1

    # Build a nested dict of path segments, truncated at max_depth levels.
    tree_nodes: dict[str, Any] = {}
    for path in sorted(path_counts):
        parts = path.split("/")[:max_depth]
        node = tree_nodes
        for part in parts:
            node = node.setdefault(part, {})

    lines: list[str] = []

    def _walk(node: dict[str, Any], prefix: str, depth: int) -> None:
        # Depth-first walk; a node's count includes its exact path plus
        # every "<current>/..." descendant path.
        for name in sorted(node):
            current = f"{prefix}/{name}" if prefix else name
            count = sum(
                c for p, c in path_counts.items()
                if p == current or p.startswith(f"{current}/")
            )
            lines.append(f"{' ' * depth}{name} ({count})")
            if depth < max_depth - 1:
                _walk(node[name], current, depth + 1)

    _walk(tree_nodes, "", 0)
    return "\n".join(lines)
|
|
1079
|
+
|
|
1080
|
+
# ------------------------------------------------------------------
|
|
1081
|
+
# Read tracker management
|
|
1082
|
+
# ------------------------------------------------------------------
|
|
1083
|
+
|
|
1084
|
+
def clear_read_log(self) -> None:
    """Reset the session read log, e.g. between sub-tasks.

    Subsequent ``explain()`` calls will only see reads made after this
    point.
    """
    self._read_tracker.clear()
|
|
1087
|
+
|
|
1088
|
+
# ------------------------------------------------------------------
|
|
1089
|
+
# Lifecycle
|
|
1090
|
+
# ------------------------------------------------------------------
|
|
1091
|
+
|
|
1092
|
+
def close(self) -> None:
    """Stop background threads and clean up resources.

    Stops the lifecycle manager if one is running, then closes the
    adapter if it exposes a ``close()`` method.
    """
    lifecycle = self._lifecycle
    if lifecycle is not None:
        lifecycle.stop()
    # Not every adapter owns closable resources.
    if hasattr(self._adapter, "close"):
        self._adapter.close()  # type: ignore[attr-defined]
|
|
1098
|
+
|
|
1099
|
+
def __enter__(self) -> AgentMemory:
    """Enter the context manager; supports ``with AgentMemory(...) as mem:``."""
    return self
|
|
1101
|
+
|
|
1102
|
+
def __exit__(self, *args: Any) -> None:
    """Exit the context manager; delegates cleanup to :meth:`close`."""
    self.close()
|
|
1104
|
+
|
|
1105
|
+
|
|
1106
|
+
class MemoryScope:
    """A view of :class:`AgentMemory` pinned to a fixed entity path.

    Every call is forwarded to the underlying memory with the bound
    *entity_path* filled in automatically. A scope created with
    ``readonly=True`` rejects writes by raising :class:`PermissionError`.
    """

    def __init__(
        self,
        memory: AgentMemory,
        entity_path: str,
        *,
        readonly: bool = False,
    ) -> None:
        self._memory = memory
        self._entity_path = entity_path
        self._readonly = readonly

    @property
    def entity_path(self) -> str:
        """The entity path this scope is bound to."""
        return self._entity_path

    @property
    def readonly(self) -> bool:
        """Whether writes through this scope are rejected."""
        return self._readonly

    def read(self, key: str, **kwargs: Any) -> MemoryEntry | None:
        """Read *key* within the bound entity path."""
        return self._memory.read(self._entity_path, key, **kwargs)

    def write(self, key: str, value: Any, **kwargs: Any) -> MemoryEntry:
        """Write *value* under *key* within the bound entity path.

        Raises:
            PermissionError: if this scope was created read-only.
        """
        if self._readonly:
            raise PermissionError("Read-only scope")
        return self._memory.write(self._entity_path, key, value, **kwargs)

    def list(self, **kwargs: Any) -> list[MemoryEntry]:
        """List entries within the bound entity path."""
        return self._memory.list(self._entity_path, **kwargs)

    def search(self, **kwargs: Any) -> list[MemoryEntry]:
        """Search within the bound entity path."""
        return self._memory.search(entity_path=self._entity_path, **kwargs)

    def history(self, key: str, **kwargs: Any) -> list[MemoryEntry]:
        """Return the version history of *key* within the bound entity path."""
        return self._memory.history(self._entity_path, key, **kwargs)

    def info(self) -> ScopeInfo:
        """Return summary information about the bound entity path."""
        return self._memory.info(self._entity_path)
|