synaptic-memory 0.5.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- synaptic/__init__.py +72 -0
- synaptic/activity.py +261 -0
- synaptic/agent_search.py +385 -0
- synaptic/backends/__init__.py +1 -0
- synaptic/backends/composite.py +240 -0
- synaptic/backends/memory.py +215 -0
- synaptic/backends/minio_store.py +108 -0
- synaptic/backends/neo4j.py +521 -0
- synaptic/backends/postgresql.py +479 -0
- synaptic/backends/qdrant.py +135 -0
- synaptic/backends/sqlite.py +390 -0
- synaptic/cache.py +67 -0
- synaptic/consolidation.py +175 -0
- synaptic/exporter.py +120 -0
- synaptic/extensions/__init__.py +1 -0
- synaptic/extensions/embedder.py +143 -0
- synaptic/extensions/rewriter.py +60 -0
- synaptic/extensions/tagger_regex.py +48 -0
- synaptic/graph.py +349 -0
- synaptic/hebbian.py +97 -0
- synaptic/mcp/__init__.py +3 -0
- synaptic/mcp/__main__.py +5 -0
- synaptic/mcp/server.py +714 -0
- synaptic/models.py +130 -0
- synaptic/ontology.py +405 -0
- synaptic/protocols.py +98 -0
- synaptic/py.typed +0 -0
- synaptic/resonance.py +75 -0
- synaptic/search.py +131 -0
- synaptic/store.py +111 -0
- synaptic/synonyms.py +73 -0
- synaptic_memory-0.5.0.dist-info/METADATA +449 -0
- synaptic_memory-0.5.0.dist-info/RECORD +36 -0
- synaptic_memory-0.5.0.dist-info/WHEEL +4 -0
- synaptic_memory-0.5.0.dist-info/entry_points.txt +2 -0
- synaptic_memory-0.5.0.dist-info/licenses/LICENSE +21 -0
synaptic/__init__.py
ADDED
|
@@ -0,0 +1,72 @@
|
|
|
1
|
+
"""Synaptic Memory — Brain-inspired knowledge graph for LLM agents."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from synaptic.activity import ActivityTracker
|
|
6
|
+
from synaptic.agent_search import AgentSearch, SearchIntent, suggest_intent
|
|
7
|
+
from synaptic.extensions.embedder import EmbeddingProvider, MockEmbeddingProvider
|
|
8
|
+
from synaptic.graph import SynapticGraph
|
|
9
|
+
from synaptic.models import (
|
|
10
|
+
ActivatedNode,
|
|
11
|
+
ConsolidationLevel,
|
|
12
|
+
DigestResult,
|
|
13
|
+
Edge,
|
|
14
|
+
EdgeKind,
|
|
15
|
+
Node,
|
|
16
|
+
NodeKind,
|
|
17
|
+
SearchResult,
|
|
18
|
+
)
|
|
19
|
+
from synaptic.ontology import (
|
|
20
|
+
OntologyRegistry,
|
|
21
|
+
PropertyDef,
|
|
22
|
+
RelationConstraint,
|
|
23
|
+
TypeDef,
|
|
24
|
+
build_agent_ontology,
|
|
25
|
+
)
|
|
26
|
+
from synaptic.protocols import Digester, GraphTraversal, QueryRewriter, StorageBackend, TagExtractor
|
|
27
|
+
from synaptic.resonance import ResonanceWeights
|
|
28
|
+
|
|
29
|
+
# Package version — keep in sync with the distribution metadata (synaptic-memory 0.5.0).
__version__ = "0.5.0"

# Public API re-exported at package level (kept sorted alphabetically).
# OpenAIEmbeddingProvider / OllamaEmbeddingProvider are deliberately absent:
# they are resolved lazily by the module-level __getattr__ so importing
# `synaptic` does not require their optional dependencies (e.g. aiohttp).
__all__ = [
    "ActivatedNode",
    "ActivityTracker",
    "AgentSearch",
    "ConsolidationLevel",
    "DigestResult",
    "Digester",
    "Edge",
    "EdgeKind",
    "EmbeddingProvider",
    "GraphTraversal",
    "MockEmbeddingProvider",
    "Node",
    "NodeKind",
    "OntologyRegistry",
    "PropertyDef",
    "QueryRewriter",
    "RelationConstraint",
    "ResonanceWeights",
    "SearchIntent",
    "SearchResult",
    "StorageBackend",
    "SynapticGraph",
    "TagExtractor",
    "TypeDef",
    "build_agent_ontology",
    "suggest_intent",
]
|
|
59
|
+
|
|
60
|
+
|
|
61
|
+
def __getattr__(name: str) -> object:
    """Resolve optional-dependency embedding providers on first access.

    Importing them eagerly would crash when their optional dependencies
    (e.g. aiohttp) are not installed, so they are loaded lazily (PEP 562).
    """
    if name == "OllamaEmbeddingProvider":
        from synaptic.extensions.embedder import OllamaEmbeddingProvider  # noqa: PLC0415
        return OllamaEmbeddingProvider
    if name == "OpenAIEmbeddingProvider":
        from synaptic.extensions.embedder import OpenAIEmbeddingProvider  # noqa: PLC0415
        return OpenAIEmbeddingProvider
    raise AttributeError(f"module 'synaptic' has no attribute {name!r}")
|
synaptic/activity.py
ADDED
|
@@ -0,0 +1,261 @@
|
|
|
1
|
+
"""Agent activity tracker — records sessions, tool calls, decisions, outcomes."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import json
|
|
6
|
+
from time import time
|
|
7
|
+
from typing import TYPE_CHECKING
|
|
8
|
+
|
|
9
|
+
from synaptic.models import (
|
|
10
|
+
Edge,
|
|
11
|
+
EdgeKind,
|
|
12
|
+
Node,
|
|
13
|
+
NodeKind,
|
|
14
|
+
)
|
|
15
|
+
|
|
16
|
+
if TYPE_CHECKING:
|
|
17
|
+
from synaptic.graph import SynapticGraph
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
class ActivityTracker:
    """Persist an agent's working history in the knowledge graph.

    Sessions, tool calls, decisions, observations and outcomes become nodes;
    each activity is attached to its session with a PART_OF edge and chained
    to the previous activity with a FOLLOWED_BY edge for temporal ordering.

    NOTE(review): most operations go through the SynapticGraph public API,
    but node updates and edge/node lookups reach into ``self._graph.backend``
    directly.
    """

    __slots__ = ("_graph", "_prev_activity")

    def __init__(self, graph: SynapticGraph) -> None:
        self._graph = graph
        # session_id -> id of that session's most recent activity node;
        # used to extend the FOLLOWED_BY temporal chain.
        self._prev_activity: dict[str, str] = {}

    async def start_session(
        self,
        *,
        agent_id: str = "",
        description: str = "",
        metadata: dict[str, str] | None = None,
    ) -> Node:
        """Create a Session node and return it."""
        props: dict[str, str] = {
            "agent_id": agent_id,
            "start_time": str(time()),
            "status": "active",
        }
        # Caller-supplied metadata may override the defaults above.
        props.update(metadata or {})

        tags = ["session"]
        if agent_id:
            tags.append(agent_id)

        return await self._graph.add(
            title=f"Session: {description or agent_id or 'unnamed'}",
            content=description,
            kind=NodeKind.SESSION,
            tags=tags,
            properties=props,
        )

    async def end_session(
        self,
        session_id: str,
        *,
        outcome: str = "",
    ) -> None:
        """Mark a session completed, stamping its end time and outcome."""
        session = await self._graph.get(session_id)
        if session is None:
            return
        session.properties["end_time"] = str(time())
        session.properties["status"] = "completed"
        if outcome:
            session.properties["outcome"] = outcome
        await self._graph.backend.update_node(session)
        # The session is over — drop its FOLLOWED_BY chain head.
        self._prev_activity.pop(session_id, None)

    async def log_tool_call(
        self,
        session_id: str,
        *,
        tool_name: str,
        parameters: dict[str, object] | None = None,
        result: str = "",
        success: bool = True,
        duration_ms: float = 0.0,
    ) -> Node:
        """Record a tool invocation and attach it to its session."""
        props: dict[str, str] = {
            "tool_name": tool_name,
            "success": str(success).lower(),
            "duration_ms": str(duration_ms),
        }
        if parameters:
            # default=str keeps non-JSON-native parameter values serializable.
            props["parameters"] = json.dumps(parameters, default=str)
        if result:
            # Keep the property compact; the fuller text goes into `content`.
            props["result_summary"] = result[:1000]

        call_node = await self._graph.add(
            title=f"Tool: {tool_name}",
            content=result[:2000] if result else "",
            kind=NodeKind.TOOL_CALL,
            tags=["tool_call", tool_name],
            properties=props,
        )
        await self._link_to_session(call_node.id, session_id)
        return call_node

    async def record_decision(
        self,
        session_id: str,
        *,
        title: str,
        rationale: str,
        alternatives: list[str] | None = None,
        context_node_ids: list[str] | None = None,
    ) -> Node:
        """Record a decision with its rationale and optional context links."""
        props: dict[str, str] = {"rationale": rationale}
        if alternatives:
            props["alternatives"] = json.dumps(alternatives)

        decision = await self._graph.add(
            title=title,
            content=rationale,
            kind=NodeKind.DECISION,
            tags=["decision"],
            properties=props,
        )

        await self._link_to_session(decision.id, session_id)

        # Tie the decision to the knowledge it was based on.
        for ctx_id in context_node_ids or []:
            await self._graph.link(
                decision.id, ctx_id,
                kind=EdgeKind.DEPENDS_ON,
                weight=0.8,
            )

        return decision

    async def record_observation(
        self,
        session_id: str,
        *,
        title: str,
        content: str,
        source_node_id: str = "",
    ) -> Node:
        """Record an observation from tool output or the environment."""
        observation = await self._graph.add(
            title=title,
            content=content,
            kind=NodeKind.OBSERVATION,
            tags=["observation"],
        )
        await self._link_to_session(observation.id, session_id)

        if source_node_id:
            # The producing node (e.g. a tool call) PRODUCED this observation.
            await self._graph.link(
                source_node_id, observation.id,
                kind=EdgeKind.PRODUCED,
                weight=0.7,
            )

        return observation

    async def record_outcome(
        self,
        decision_id: str,
        *,
        title: str,
        content: str,
        success: bool,
    ) -> Node:
        """Attach an outcome to a prior decision and reinforce both nodes."""
        outcome = await self._graph.add(
            title=title,
            content=content,
            kind=NodeKind.OUTCOME,
            tags=["outcome", "success" if success else "failure"],
            properties={"success": str(success).lower()},
        )

        # decision → outcome causal edge.
        await self._graph.link(
            decision_id, outcome.id,
            kind=EdgeKind.RESULTED_IN,
            weight=1.0,
        )

        # Hebbian reinforcement on both ends of the chain.
        await self._graph.reinforce([decision_id, outcome.id], success=success)

        return outcome

    async def get_session_timeline(self, session_id: str) -> list[Node]:
        """Return a session's activity nodes ordered by created_at."""
        incoming = await self._graph.backend.get_edges(session_id, direction="incoming")
        member_ids = [e.source_id for e in incoming if e.kind == EdgeKind.PART_OF]

        timeline: list[Node] = []
        for member_id in member_ids:
            member = await self._graph.backend.get_node(member_id)
            if member is not None:
                timeline.append(member)

        return sorted(timeline, key=lambda n: n.created_at)

    async def get_decision_chain(self, decision_id: str) -> list[tuple[Node, Edge]]:
        """Follow a decision's RESULTED_IN outcomes and their LEARNED_FROM lessons."""
        chain: list[tuple[Node, Edge]] = []

        if await self._graph.backend.get_node(decision_id) is None:
            return chain

        out_edges = await self._graph.backend.get_edges(decision_id, direction="outgoing")
        for edge in out_edges:
            if edge.kind != EdgeKind.RESULTED_IN:
                continue
            outcome = await self._graph.backend.get_node(edge.target_id)
            if outcome is None:
                continue
            chain.append((outcome, edge))

            # Lessons point *at* the outcome, hence the incoming direction.
            lesson_edges = await self._graph.backend.get_edges(
                outcome.id, direction="incoming"
            )
            for lesson_edge in lesson_edges:
                if lesson_edge.kind != EdgeKind.LEARNED_FROM:
                    continue
                lesson = await self._graph.backend.get_node(lesson_edge.source_id)
                if lesson is not None:
                    chain.append((lesson, lesson_edge))

        return chain

    # --- Internal helpers ---

    async def _link_to_session(self, node_id: str, session_id: str) -> None:
        """Attach an activity to its session and extend the temporal chain."""
        # Membership: activity PART_OF session.
        await self._graph.link(
            node_id, session_id,
            kind=EdgeKind.PART_OF,
            weight=1.0,
        )
        # Temporal ordering: previous activity FOLLOWED_BY this one.
        predecessor = self._prev_activity.get(session_id)
        if predecessor:
            await self._graph.link(
                predecessor, node_id,
                kind=EdgeKind.FOLLOWED_BY,
                weight=1.0,
            )
        self._prev_activity[session_id] = node_id
|