alma-memory 0.4.0__py3-none-any.whl → 0.5.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- alma/__init__.py +121 -45
- alma/confidence/__init__.py +1 -1
- alma/confidence/engine.py +92 -58
- alma/confidence/types.py +34 -14
- alma/config/loader.py +3 -2
- alma/consolidation/__init__.py +23 -0
- alma/consolidation/engine.py +678 -0
- alma/consolidation/prompts.py +84 -0
- alma/core.py +136 -28
- alma/domains/__init__.py +6 -6
- alma/domains/factory.py +12 -9
- alma/domains/schemas.py +17 -3
- alma/domains/types.py +8 -4
- alma/events/__init__.py +75 -0
- alma/events/emitter.py +284 -0
- alma/events/storage_mixin.py +246 -0
- alma/events/types.py +126 -0
- alma/events/webhook.py +425 -0
- alma/exceptions.py +49 -0
- alma/extraction/__init__.py +31 -0
- alma/extraction/auto_learner.py +265 -0
- alma/extraction/extractor.py +420 -0
- alma/graph/__init__.py +106 -0
- alma/graph/backends/__init__.py +32 -0
- alma/graph/backends/kuzu.py +624 -0
- alma/graph/backends/memgraph.py +432 -0
- alma/graph/backends/memory.py +236 -0
- alma/graph/backends/neo4j.py +417 -0
- alma/graph/base.py +159 -0
- alma/graph/extraction.py +198 -0
- alma/graph/store.py +860 -0
- alma/harness/__init__.py +4 -4
- alma/harness/base.py +18 -9
- alma/harness/domains.py +27 -11
- alma/initializer/__init__.py +1 -1
- alma/initializer/initializer.py +51 -43
- alma/initializer/types.py +25 -17
- alma/integration/__init__.py +9 -9
- alma/integration/claude_agents.py +32 -20
- alma/integration/helena.py +32 -22
- alma/integration/victor.py +57 -33
- alma/learning/__init__.py +27 -27
- alma/learning/forgetting.py +198 -148
- alma/learning/heuristic_extractor.py +40 -24
- alma/learning/protocols.py +65 -17
- alma/learning/validation.py +7 -2
- alma/mcp/__init__.py +4 -4
- alma/mcp/__main__.py +2 -1
- alma/mcp/resources.py +17 -16
- alma/mcp/server.py +102 -44
- alma/mcp/tools.py +180 -45
- alma/observability/__init__.py +84 -0
- alma/observability/config.py +302 -0
- alma/observability/logging.py +424 -0
- alma/observability/metrics.py +583 -0
- alma/observability/tracing.py +440 -0
- alma/progress/__init__.py +3 -3
- alma/progress/tracker.py +26 -20
- alma/progress/types.py +8 -12
- alma/py.typed +0 -0
- alma/retrieval/__init__.py +11 -11
- alma/retrieval/cache.py +20 -21
- alma/retrieval/embeddings.py +4 -4
- alma/retrieval/engine.py +179 -39
- alma/retrieval/scoring.py +73 -63
- alma/session/__init__.py +2 -2
- alma/session/manager.py +5 -5
- alma/session/types.py +5 -4
- alma/storage/__init__.py +70 -0
- alma/storage/azure_cosmos.py +414 -133
- alma/storage/base.py +215 -4
- alma/storage/chroma.py +1443 -0
- alma/storage/constants.py +103 -0
- alma/storage/file_based.py +59 -28
- alma/storage/migrations/__init__.py +21 -0
- alma/storage/migrations/base.py +321 -0
- alma/storage/migrations/runner.py +323 -0
- alma/storage/migrations/version_stores.py +337 -0
- alma/storage/migrations/versions/__init__.py +11 -0
- alma/storage/migrations/versions/v1_0_0.py +373 -0
- alma/storage/pinecone.py +1080 -0
- alma/storage/postgresql.py +1559 -0
- alma/storage/qdrant.py +1306 -0
- alma/storage/sqlite_local.py +504 -60
- alma/testing/__init__.py +46 -0
- alma/testing/factories.py +301 -0
- alma/testing/mocks.py +389 -0
- alma/types.py +62 -14
- alma_memory-0.5.1.dist-info/METADATA +939 -0
- alma_memory-0.5.1.dist-info/RECORD +93 -0
- {alma_memory-0.4.0.dist-info → alma_memory-0.5.1.dist-info}/WHEEL +1 -1
- alma_memory-0.4.0.dist-info/METADATA +0 -488
- alma_memory-0.4.0.dist-info/RECORD +0 -52
- {alma_memory-0.4.0.dist-info → alma_memory-0.5.1.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,432 @@
|
|
|
1
|
+
"""
|
|
2
|
+
ALMA Graph Memory - Memgraph Backend.
|
|
3
|
+
|
|
4
|
+
Memgraph implementation of the GraphBackend interface.
|
|
5
|
+
Memgraph is compatible with Neo4j's Bolt protocol, so the neo4j Python driver works with it.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
import json
|
|
9
|
+
import logging
|
|
10
|
+
from datetime import datetime, timezone
|
|
11
|
+
from typing import Any, Dict, List, Optional
|
|
12
|
+
|
|
13
|
+
from alma.graph.base import GraphBackend
|
|
14
|
+
from alma.graph.store import Entity, Relationship
|
|
15
|
+
|
|
16
|
+
logger = logging.getLogger(__name__)
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
class MemgraphBackend(GraphBackend):
    """
    Memgraph graph database backend.

    Memgraph is an in-memory graph database compatible with Neo4j's Bolt
    protocol, so this backend reuses the neo4j Python driver.

    Requires neo4j Python driver: pip install neo4j

    Example usage:
        backend = MemgraphBackend(
            uri="bolt://localhost:7687",
            username="",
            password=""
        )
        backend.add_entity(entity)
        backend.close()

    Note: Memgraph typically doesn't require authentication by default,
    but it can be configured. Use empty strings for username/password if
    authentication is disabled.
    """

    def __init__(
        self,
        uri: str = "bolt://localhost:7687",
        username: str = "",
        password: str = "",
        database: str = "memgraph",
    ):
        """
        Initialize Memgraph connection settings (the driver is created lazily).

        Args:
            uri: Memgraph connection URI (bolt://)
            username: Database username (empty if auth disabled)
            password: Database password (empty if auth disabled)
            database: Database name (default: "memgraph"). Kept for API
                symmetry; sessions are opened without it because most
                Memgraph deployments expose a single database.
        """
        self.uri = uri
        self.username = username
        self.password = password
        self.database = database
        self._driver = None

    def _get_driver(self):
        """Lazy initialization of Memgraph driver (via neo4j package).

        Raises:
            ImportError: If the ``neo4j`` package is not installed.
        """
        if self._driver is None:
            try:
                from neo4j import GraphDatabase

                # Memgraph uses the same Bolt protocol as Neo4j
                if self.username and self.password:
                    self._driver = GraphDatabase.driver(
                        self.uri,
                        auth=(self.username, self.password),
                    )
                else:
                    # No authentication
                    self._driver = GraphDatabase.driver(self.uri)
            except ImportError as err:
                raise ImportError(
                    "neo4j package required for Memgraph graph backend. "
                    "Install with: pip install neo4j"
                ) from err
        return self._driver

    def _run_query(self, query: str, parameters: Optional[Dict] = None) -> List[Dict]:
        """Execute a Cypher query and return all records as plain dicts."""
        driver = self._get_driver()
        # Memgraph doesn't use the database parameter in the same way as Neo4j.
        # Most Memgraph setups use a single database.
        with driver.session() as session:
            result = session.run(query, parameters or {})
            return [dict(record) for record in result]

    @staticmethod
    def _parse_created_at(value: Optional[str]) -> datetime:
        """Parse an ISO-8601 timestamp; fall back to now (UTC) when missing."""
        return datetime.fromisoformat(value) if value else datetime.now(timezone.utc)

    def _row_to_entity(self, r: Dict) -> Entity:
        """Rebuild an Entity from a query result row.

        Re-attaches the project_id/agent scoping fields that add_entity()
        lifted out of the JSON properties blob into node attributes.
        """
        properties = json.loads(r["properties"]) if r["properties"] else {}
        if r.get("project_id"):
            properties["project_id"] = r["project_id"]
        if r.get("agent"):
            properties["agent"] = r["agent"]
        return Entity(
            id=r["id"],
            name=r["name"],
            entity_type=r["entity_type"],
            properties=properties,
            created_at=self._parse_created_at(r["created_at"]),
        )

    def _row_to_relationship(self, r: Dict) -> Relationship:
        """Rebuild a Relationship from a query result row.

        Synthesizes a deterministic ID when the stored relationship has none.
        """
        rel_id = (
            r["id"] or f"{r['source_id']}-{r['relation_type']}-{r['target_id']}"
        )
        return Relationship(
            id=rel_id,
            source_id=r["source_id"],
            target_id=r["target_id"],
            relation_type=r["relation_type"],
            properties=json.loads(r["properties"]) if r["properties"] else {},
            confidence=r["confidence"] or 1.0,
            created_at=self._parse_created_at(r["created_at"]),
        )

    def add_entity(self, entity: Entity) -> str:
        """Add or update an entity in Memgraph.

        project_id and agent are lifted out of ``entity.properties`` into
        first-class node attributes so Cypher filters can use them directly;
        the remaining properties are stored as a JSON string.
        """
        properties = entity.properties.copy()
        project_id = properties.pop("project_id", None)
        agent = properties.pop("agent", None)

        set_clauses = [
            "e.name = $name",
            "e.entity_type = $entity_type",
            "e.properties = $properties",
            "e.created_at = $created_at",
        ]
        params: Dict[str, Any] = {
            "id": entity.id,
            "name": entity.name,
            "entity_type": entity.entity_type,
            "properties": json.dumps(properties),
            "created_at": entity.created_at.isoformat(),
        }

        # Add optional scoping fields if present
        if project_id:
            set_clauses.append("e.project_id = $project_id")
            params["project_id"] = project_id
        if agent:
            set_clauses.append("e.agent = $agent")
            params["agent"] = agent

        query = (
            "MERGE (e:Entity {id: $id}) "
            "SET " + ", ".join(set_clauses) + " "
            "RETURN e.id as id"
        )

        result = self._run_query(query, params)
        return result[0]["id"] if result else entity.id

    def add_relationship(self, relationship: Relationship) -> str:
        """Add or update a relationship in Memgraph.

        Note: Cypher cannot parameterize relationship types, so the
        (sanitized) type is interpolated into the query text.
        """
        # Sanitize relationship type for Cypher (remove special characters)
        rel_type = (
            relationship.relation_type.replace("-", "_").replace(" ", "_").upper()
        )

        query = f"""
        MATCH (source:Entity {{id: $source_id}})
        MATCH (target:Entity {{id: $target_id}})
        MERGE (source)-[r:{rel_type}]->(target)
        SET r.id = $id,
            r.properties = $properties,
            r.confidence = $confidence,
            r.created_at = $created_at
        RETURN r.id as id
        """
        result = self._run_query(
            query,
            {
                "id": relationship.id,
                "source_id": relationship.source_id,
                "target_id": relationship.target_id,
                "properties": json.dumps(relationship.properties),
                "confidence": relationship.confidence,
                "created_at": relationship.created_at.isoformat(),
            },
        )
        return result[0]["id"] if result else relationship.id

    def get_entity(self, entity_id: str) -> Optional[Entity]:
        """Get an entity by ID, or None when it does not exist."""
        query = """
        MATCH (e:Entity {id: $id})
        RETURN e.id as id, e.name as name, e.entity_type as entity_type,
               e.properties as properties, e.created_at as created_at,
               e.project_id as project_id, e.agent as agent
        """
        result = self._run_query(query, {"id": entity_id})
        if not result:
            return None
        return self._row_to_entity(result[0])

    def get_entities(
        self,
        entity_type: Optional[str] = None,
        project_id: Optional[str] = None,
        agent: Optional[str] = None,
        limit: int = 100,
    ) -> List[Entity]:
        """Get entities with optional filtering by type, project, and agent."""
        conditions = []
        params: Dict[str, Any] = {"limit": limit}

        if entity_type:
            conditions.append("e.entity_type = $entity_type")
            params["entity_type"] = entity_type
        if project_id:
            conditions.append("e.project_id = $project_id")
            params["project_id"] = project_id
        if agent:
            conditions.append("e.agent = $agent")
            params["agent"] = agent

        where_clause = f"WHERE {' AND '.join(conditions)}" if conditions else ""

        query = f"""
        MATCH (e:Entity)
        {where_clause}
        RETURN e.id as id, e.name as name, e.entity_type as entity_type,
               e.properties as properties, e.created_at as created_at,
               e.project_id as project_id, e.agent as agent
        LIMIT $limit
        """

        results = self._run_query(query, params)
        return [self._row_to_entity(r) for r in results]

    def get_relationships(self, entity_id: str) -> List[Relationship]:
        """Get all relationships for an entity (both directions).

        startNode()/endNode() yield the true direction regardless of which
        side of the undirected match the entity appears on.
        """
        query = """
        MATCH (e:Entity {id: $entity_id})-[r]-(other:Entity)
        RETURN r.id as id,
               startNode(r).id as source_id,
               endNode(r).id as target_id,
               type(r) as relation_type, r.properties as properties,
               r.confidence as confidence, r.created_at as created_at
        """

        results = self._run_query(query, {"entity_id": entity_id})
        return [self._row_to_relationship(r) for r in results]

    def search_entities(
        self,
        query: str,
        embedding: Optional[List[float]] = None,
        top_k: int = 10,
    ) -> List[Entity]:
        """
        Search for entities by case-insensitive name substring.

        Note: Vector similarity search requires Memgraph MAGE with vector
        operations. The ``embedding`` argument is currently accepted but
        ignored; text search is always used.
        """
        # Vector search can be added when Memgraph MAGE is configured
        cypher = """
        MATCH (e:Entity)
        WHERE toLower(e.name) CONTAINS toLower($query)
        RETURN e.id as id, e.name as name, e.entity_type as entity_type,
               e.properties as properties, e.created_at as created_at,
               e.project_id as project_id, e.agent as agent
        LIMIT $limit
        """

        results = self._run_query(cypher, {"query": query, "limit": top_k})
        return [self._row_to_entity(r) for r in results]

    def delete_entity(self, entity_id: str) -> bool:
        """Delete an entity and its relationships. Returns True if deleted."""
        query = """
        MATCH (e:Entity {id: $id})
        DETACH DELETE e
        RETURN count(e) as deleted
        """
        result = self._run_query(query, {"id": entity_id})
        return result[0]["deleted"] > 0 if result else False

    def delete_relationship(self, relationship_id: str) -> bool:
        """Delete a specific relationship by ID. Returns True if deleted.

        Uses a directed pattern so each relationship matches exactly once
        (an undirected ()-[r]-() pattern would match it twice).
        """
        query = """
        MATCH ()-[r]->()
        WHERE r.id = $id
        DELETE r
        RETURN count(r) as deleted
        """
        result = self._run_query(query, {"id": relationship_id})
        return result[0]["deleted"] > 0 if result else False

    def close(self) -> None:
        """Close the Memgraph driver connection and drop the cached driver."""
        if self._driver:
            self._driver.close()
            self._driver = None

    # Additional methods for compatibility with existing GraphStore API

    def find_entities(
        self,
        name: Optional[str] = None,
        entity_type: Optional[str] = None,
        limit: int = 10,
    ) -> List[Entity]:
        """
        Find entities by name or type.

        This method provides compatibility with the existing GraphStore API.
        When a name is given it takes precedence and entity_type is ignored,
        matching the original behavior.
        """
        if name:
            return self.search_entities(query=name, top_k=limit)

        return self.get_entities(entity_type=entity_type, limit=limit)

    def get_relationships_directional(
        self,
        entity_id: str,
        direction: str = "both",
        relation_type: Optional[str] = None,
    ) -> List[Relationship]:
        """
        Get relationships for an entity with direction control.

        This method provides compatibility with the existing GraphStore API.

        Args:
            entity_id: The entity ID.
            direction: "outgoing", "incoming", or "both".
            relation_type: Optional filter by relationship type.

        Returns:
            List of matching relationships.

        Note: source_id/target_id are taken from startNode()/endNode() so
        that incoming relationships report their true direction (previously
        the queried entity was always reported as the source).
        """
        if direction == "outgoing":
            pattern = "(e)-[r]->(other)"
        elif direction == "incoming":
            pattern = "(e)<-[r]-(other)"
        else:
            pattern = "(e)-[r]-(other)"

        type_filter = f":{relation_type}" if relation_type else ""

        query = f"""
        MATCH (e:Entity {{id: $entity_id}}){pattern.replace("[r]", f"[r{type_filter}]")}
        RETURN r.id as id,
               startNode(r).id as source_id,
               endNode(r).id as target_id,
               type(r) as relation_type, r.properties as properties,
               r.confidence as confidence, r.created_at as created_at
        """

        results = self._run_query(query, {"entity_id": entity_id})
        return [self._row_to_relationship(r) for r in results]
|
|
@@ -0,0 +1,236 @@
|
|
|
1
|
+
"""
|
|
2
|
+
ALMA Graph Memory - In-Memory Backend.
|
|
3
|
+
|
|
4
|
+
In-memory implementation of the GraphBackend interface for testing and development.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
import logging
|
|
8
|
+
from typing import Dict, List, Optional, Set
|
|
9
|
+
|
|
10
|
+
from alma.graph.base import GraphBackend
|
|
11
|
+
from alma.graph.store import Entity, Relationship
|
|
12
|
+
|
|
13
|
+
logger = logging.getLogger(__name__)
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
class InMemoryBackend(GraphBackend):
    """
    In-memory graph database backend.

    Suitable for testing, development, and small-scale use cases
    where persistence is not required.

    No external dependencies required.
    """

    def __init__(self):
        """Initialize empty in-memory storage."""
        # Primary stores, keyed by id.
        self._entities: Dict[str, Entity] = {}
        self._relationships: Dict[str, Relationship] = {}
        # Adjacency indexes: entity_id -> list of relationship ids.
        self._outgoing: Dict[str, List[str]] = {}
        self._incoming: Dict[str, List[str]] = {}

    def add_entity(self, entity: Entity) -> str:
        """Add or update an entity and ensure its adjacency lists exist."""
        self._entities[entity.id] = entity
        self._outgoing.setdefault(entity.id, [])
        self._incoming.setdefault(entity.id, [])
        return entity.id

    def add_relationship(self, relationship: Relationship) -> str:
        """Add or update a relationship and index it on both endpoints."""
        self._relationships[relationship.id] = relationship

        out_index = self._outgoing.setdefault(relationship.source_id, [])
        if relationship.id not in out_index:
            out_index.append(relationship.id)

        in_index = self._incoming.setdefault(relationship.target_id, [])
        if relationship.id not in in_index:
            in_index.append(relationship.id)

        return relationship.id

    def get_entity(self, entity_id: str) -> Optional[Entity]:
        """Get an entity by ID, or None when it does not exist."""
        return self._entities.get(entity_id)

    def get_entities(
        self,
        entity_type: Optional[str] = None,
        project_id: Optional[str] = None,
        agent: Optional[str] = None,
        limit: int = 100,
    ) -> List[Entity]:
        """Get entities with optional filtering by type, project, and agent."""

        def accepts(candidate: Entity) -> bool:
            # Falsy filter values mean "no filter on this field".
            if entity_type and candidate.entity_type != entity_type:
                return False
            if project_id and candidate.properties.get("project_id") != project_id:
                return False
            if agent and candidate.properties.get("agent") != agent:
                return False
            return True

        selected: List[Entity] = []
        for candidate in self._entities.values():
            if accepts(candidate):
                selected.append(candidate)
                if len(selected) >= limit:
                    break
        return selected

    def get_relationships(self, entity_id: str) -> List[Relationship]:
        """Get all relationships for an entity (both directions)."""
        connected: Set[str] = set(self._outgoing.get(entity_id, []))
        connected.update(self._incoming.get(entity_id, []))

        return [
            self._relationships[rid]
            for rid in connected
            if rid in self._relationships
        ]

    def search_entities(
        self,
        query: str,
        embedding: Optional[List[float]] = None,
        top_k: int = 10,
    ) -> List[Entity]:
        """
        Search for entities by name.

        Note: Vector similarity search is not implemented for in-memory backend.
        Falls back to case-insensitive text search.
        """
        needle = query.lower()
        hits: List[Entity] = []
        for candidate in self._entities.values():
            if needle in candidate.name.lower():
                hits.append(candidate)
                if len(hits) >= top_k:
                    break
        return hits

    def delete_entity(self, entity_id: str) -> bool:
        """Delete an entity and all its relationships."""
        if entity_id not in self._entities:
            return False

        # Drop relationships leaving this entity.
        for rid in list(self._outgoing.get(entity_id, [])):
            rel = self._relationships.get(rid)
            if rel is None:
                continue
            # Un-index from the peer's incoming list.
            peer_incoming = self._incoming.get(rel.target_id)
            if peer_incoming and rid in peer_incoming:
                peer_incoming.remove(rid)
            del self._relationships[rid]

        # Drop relationships arriving at this entity.
        for rid in list(self._incoming.get(entity_id, [])):
            rel = self._relationships.get(rid)
            if rel is None:
                continue
            # Un-index from the peer's outgoing list.
            peer_outgoing = self._outgoing.get(rel.source_id)
            if peer_outgoing and rid in peer_outgoing:
                peer_outgoing.remove(rid)
            del self._relationships[rid]

        # Finally remove the entity and its adjacency lists.
        del self._entities[entity_id]
        self._outgoing.pop(entity_id, None)
        self._incoming.pop(entity_id, None)

        return True

    def delete_relationship(self, relationship_id: str) -> bool:
        """Delete a specific relationship by ID."""
        rel = self._relationships.pop(relationship_id, None)
        if rel is None:
            return False

        # Un-index from the source's outgoing list.
        src_out = self._outgoing.get(rel.source_id)
        if src_out and relationship_id in src_out:
            src_out.remove(relationship_id)

        # Un-index from the target's incoming list.
        dst_in = self._incoming.get(rel.target_id)
        if dst_in and relationship_id in dst_in:
            dst_in.remove(relationship_id)

        return True

    def close(self) -> None:
        """No-op: the in-memory backend holds no external resources."""
        pass

    def clear(self) -> None:
        """Clear all stored data."""
        for store in (
            self._entities,
            self._relationships,
            self._outgoing,
            self._incoming,
        ):
            store.clear()

    # Additional methods for compatibility with existing GraphStore API

    def find_entities(
        self,
        name: Optional[str] = None,
        entity_type: Optional[str] = None,
        limit: int = 10,
    ) -> List[Entity]:
        """
        Find entities by name or type.

        This method provides compatibility with the existing GraphStore API.
        """
        needle = name.lower() if name else None
        found: List[Entity] = []
        for candidate in self._entities.values():
            if needle is not None and needle not in candidate.name.lower():
                continue
            if entity_type and candidate.entity_type != entity_type:
                continue
            found.append(candidate)
            if len(found) >= limit:
                break
        return found

    def get_relationships_directional(
        self,
        entity_id: str,
        direction: str = "both",
        relation_type: Optional[str] = None,
    ) -> List[Relationship]:
        """
        Get relationships for an entity with direction control.

        This method provides compatibility with the existing GraphStore API.

        Args:
            entity_id: The entity ID.
            direction: "outgoing", "incoming", or "both".
            relation_type: Optional filter by relationship type.

        Returns:
            List of matching relationships.
        """
        candidate_ids: Set[str] = set()
        if direction in ("outgoing", "both"):
            candidate_ids.update(self._outgoing.get(entity_id, []))
        if direction in ("incoming", "both"):
            candidate_ids.update(self._incoming.get(entity_id, []))

        matched: List[Relationship] = []
        for rid in candidate_ids:
            rel = self._relationships.get(rid)
            if rel is None:
                continue
            if relation_type and rel.relation_type != relation_type:
                continue
            matched.append(rel)
        return matched
|