alma-memory 0.5.0__py3-none-any.whl → 0.7.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- alma/__init__.py +296 -194
- alma/compression/__init__.py +33 -0
- alma/compression/pipeline.py +980 -0
- alma/confidence/__init__.py +47 -47
- alma/confidence/engine.py +540 -540
- alma/confidence/types.py +351 -351
- alma/config/loader.py +157 -157
- alma/consolidation/__init__.py +23 -23
- alma/consolidation/engine.py +678 -678
- alma/consolidation/prompts.py +84 -84
- alma/core.py +1189 -322
- alma/domains/__init__.py +30 -30
- alma/domains/factory.py +359 -359
- alma/domains/schemas.py +448 -448
- alma/domains/types.py +272 -272
- alma/events/__init__.py +75 -75
- alma/events/emitter.py +285 -284
- alma/events/storage_mixin.py +246 -246
- alma/events/types.py +126 -126
- alma/events/webhook.py +425 -425
- alma/exceptions.py +49 -49
- alma/extraction/__init__.py +31 -31
- alma/extraction/auto_learner.py +265 -264
- alma/extraction/extractor.py +420 -420
- alma/graph/__init__.py +106 -81
- alma/graph/backends/__init__.py +32 -18
- alma/graph/backends/kuzu.py +624 -0
- alma/graph/backends/memgraph.py +432 -0
- alma/graph/backends/memory.py +236 -236
- alma/graph/backends/neo4j.py +417 -417
- alma/graph/base.py +159 -159
- alma/graph/extraction.py +198 -198
- alma/graph/store.py +860 -860
- alma/harness/__init__.py +35 -35
- alma/harness/base.py +386 -386
- alma/harness/domains.py +705 -705
- alma/initializer/__init__.py +37 -37
- alma/initializer/initializer.py +418 -418
- alma/initializer/types.py +250 -250
- alma/integration/__init__.py +62 -62
- alma/integration/claude_agents.py +444 -432
- alma/integration/helena.py +423 -423
- alma/integration/victor.py +471 -471
- alma/learning/__init__.py +101 -86
- alma/learning/decay.py +878 -0
- alma/learning/forgetting.py +1446 -1446
- alma/learning/heuristic_extractor.py +390 -390
- alma/learning/protocols.py +374 -374
- alma/learning/validation.py +346 -346
- alma/mcp/__init__.py +123 -45
- alma/mcp/__main__.py +156 -156
- alma/mcp/resources.py +122 -122
- alma/mcp/server.py +955 -591
- alma/mcp/tools.py +3254 -511
- alma/observability/__init__.py +91 -0
- alma/observability/config.py +302 -0
- alma/observability/guidelines.py +170 -0
- alma/observability/logging.py +424 -0
- alma/observability/metrics.py +583 -0
- alma/observability/tracing.py +440 -0
- alma/progress/__init__.py +21 -21
- alma/progress/tracker.py +607 -607
- alma/progress/types.py +250 -250
- alma/retrieval/__init__.py +134 -53
- alma/retrieval/budget.py +525 -0
- alma/retrieval/cache.py +1304 -1061
- alma/retrieval/embeddings.py +202 -202
- alma/retrieval/engine.py +850 -366
- alma/retrieval/modes.py +365 -0
- alma/retrieval/progressive.py +560 -0
- alma/retrieval/scoring.py +344 -344
- alma/retrieval/trust_scoring.py +637 -0
- alma/retrieval/verification.py +797 -0
- alma/session/__init__.py +19 -19
- alma/session/manager.py +442 -399
- alma/session/types.py +288 -288
- alma/storage/__init__.py +101 -61
- alma/storage/archive.py +233 -0
- alma/storage/azure_cosmos.py +1259 -1048
- alma/storage/base.py +1083 -525
- alma/storage/chroma.py +1443 -1443
- alma/storage/constants.py +103 -0
- alma/storage/file_based.py +614 -619
- alma/storage/migrations/__init__.py +21 -0
- alma/storage/migrations/base.py +321 -0
- alma/storage/migrations/runner.py +323 -0
- alma/storage/migrations/version_stores.py +337 -0
- alma/storage/migrations/versions/__init__.py +11 -0
- alma/storage/migrations/versions/v1_0_0.py +373 -0
- alma/storage/migrations/versions/v1_1_0_workflow_context.py +551 -0
- alma/storage/pinecone.py +1080 -1080
- alma/storage/postgresql.py +1948 -1452
- alma/storage/qdrant.py +1306 -1306
- alma/storage/sqlite_local.py +3041 -1358
- alma/testing/__init__.py +46 -0
- alma/testing/factories.py +301 -0
- alma/testing/mocks.py +389 -0
- alma/types.py +292 -264
- alma/utils/__init__.py +19 -0
- alma/utils/tokenizer.py +521 -0
- alma/workflow/__init__.py +83 -0
- alma/workflow/artifacts.py +170 -0
- alma/workflow/checkpoint.py +311 -0
- alma/workflow/context.py +228 -0
- alma/workflow/outcomes.py +189 -0
- alma/workflow/reducers.py +393 -0
- {alma_memory-0.5.0.dist-info → alma_memory-0.7.0.dist-info}/METADATA +244 -72
- alma_memory-0.7.0.dist-info/RECORD +112 -0
- alma_memory-0.5.0.dist-info/RECORD +0 -76
- {alma_memory-0.5.0.dist-info → alma_memory-0.7.0.dist-info}/WHEEL +0 -0
- {alma_memory-0.5.0.dist-info → alma_memory-0.7.0.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,432 @@
|
|
|
1
|
+
"""
|
|
2
|
+
ALMA Graph Memory - Memgraph Backend.
|
|
3
|
+
|
|
4
|
+
Memgraph implementation of the GraphBackend interface.
|
|
5
|
+
Memgraph is compatible with Neo4j's Bolt protocol, so the neo4j Python driver works with it.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
import json
|
|
9
|
+
import logging
|
|
10
|
+
from datetime import datetime, timezone
|
|
11
|
+
from typing import Any, Dict, List, Optional
|
|
12
|
+
|
|
13
|
+
from alma.graph.base import GraphBackend
|
|
14
|
+
from alma.graph.store import Entity, Relationship
|
|
15
|
+
|
|
16
|
+
logger = logging.getLogger(__name__)
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
class MemgraphBackend(GraphBackend):
    """
    Memgraph graph database backend.

    Memgraph is an in-memory graph database compatible with Neo4j's Bolt
    protocol, so this backend reuses the ``neo4j`` Python driver for all
    connectivity (lazily created on first query).

    Requires the neo4j Python driver: ``pip install neo4j``.

    Example usage:
        backend = MemgraphBackend(
            uri="bolt://localhost:7687",
            username="",
            password=""
        )
        backend.add_entity(entity)
        backend.close()

    Note: Memgraph typically doesn't require authentication by default,
    but it can be configured. Use empty strings for username/password if
    authentication is disabled.
    """
|
|
41
|
+
|
|
42
|
+
def __init__(
|
|
43
|
+
self,
|
|
44
|
+
uri: str = "bolt://localhost:7687",
|
|
45
|
+
username: str = "",
|
|
46
|
+
password: str = "",
|
|
47
|
+
database: str = "memgraph",
|
|
48
|
+
):
|
|
49
|
+
"""
|
|
50
|
+
Initialize Memgraph connection.
|
|
51
|
+
|
|
52
|
+
Args:
|
|
53
|
+
uri: Memgraph connection URI (bolt://)
|
|
54
|
+
username: Database username (empty if auth disabled)
|
|
55
|
+
password: Database password (empty if auth disabled)
|
|
56
|
+
database: Database name (default: "memgraph")
|
|
57
|
+
"""
|
|
58
|
+
self.uri = uri
|
|
59
|
+
self.username = username
|
|
60
|
+
self.password = password
|
|
61
|
+
self.database = database
|
|
62
|
+
self._driver = None
|
|
63
|
+
|
|
64
|
+
def _get_driver(self):
|
|
65
|
+
"""Lazy initialization of Memgraph driver (via neo4j package)."""
|
|
66
|
+
if self._driver is None:
|
|
67
|
+
try:
|
|
68
|
+
from neo4j import GraphDatabase
|
|
69
|
+
|
|
70
|
+
# Memgraph uses the same Bolt protocol as Neo4j
|
|
71
|
+
if self.username and self.password:
|
|
72
|
+
self._driver = GraphDatabase.driver(
|
|
73
|
+
self.uri,
|
|
74
|
+
auth=(self.username, self.password),
|
|
75
|
+
)
|
|
76
|
+
else:
|
|
77
|
+
# No authentication
|
|
78
|
+
self._driver = GraphDatabase.driver(self.uri)
|
|
79
|
+
except ImportError as err:
|
|
80
|
+
raise ImportError(
|
|
81
|
+
"neo4j package required for Memgraph graph backend. "
|
|
82
|
+
"Install with: pip install neo4j"
|
|
83
|
+
) from err
|
|
84
|
+
return self._driver
|
|
85
|
+
|
|
86
|
+
def _run_query(self, query: str, parameters: Optional[Dict] = None) -> List[Dict]:
|
|
87
|
+
"""Execute a Cypher query."""
|
|
88
|
+
driver = self._get_driver()
|
|
89
|
+
# Memgraph doesn't use the database parameter in the same way as Neo4j
|
|
90
|
+
# Most Memgraph setups use a single database
|
|
91
|
+
with driver.session() as session:
|
|
92
|
+
result = session.run(query, parameters or {})
|
|
93
|
+
return [dict(record) for record in result]
|
|
94
|
+
|
|
95
|
+
def add_entity(self, entity: Entity) -> str:
|
|
96
|
+
"""Add or update an entity in Memgraph."""
|
|
97
|
+
# Extract project_id and agent from properties if present
|
|
98
|
+
properties = entity.properties.copy()
|
|
99
|
+
project_id = properties.pop("project_id", None)
|
|
100
|
+
agent = properties.pop("agent", None)
|
|
101
|
+
|
|
102
|
+
query = """
|
|
103
|
+
MERGE (e:Entity {id: $id})
|
|
104
|
+
SET e.name = $name,
|
|
105
|
+
e.entity_type = $entity_type,
|
|
106
|
+
e.properties = $properties,
|
|
107
|
+
e.created_at = $created_at
|
|
108
|
+
"""
|
|
109
|
+
params = {
|
|
110
|
+
"id": entity.id,
|
|
111
|
+
"name": entity.name,
|
|
112
|
+
"entity_type": entity.entity_type,
|
|
113
|
+
"properties": json.dumps(properties),
|
|
114
|
+
"created_at": entity.created_at.isoformat(),
|
|
115
|
+
}
|
|
116
|
+
|
|
117
|
+
# Add optional fields if present
|
|
118
|
+
if project_id:
|
|
119
|
+
query += ", e.project_id = $project_id"
|
|
120
|
+
params["project_id"] = project_id
|
|
121
|
+
if agent:
|
|
122
|
+
query += ", e.agent = $agent"
|
|
123
|
+
params["agent"] = agent
|
|
124
|
+
|
|
125
|
+
query += " RETURN e.id as id"
|
|
126
|
+
|
|
127
|
+
result = self._run_query(query, params)
|
|
128
|
+
return result[0]["id"] if result else entity.id
|
|
129
|
+
|
|
130
|
+
def add_relationship(self, relationship: Relationship) -> str:
|
|
131
|
+
"""Add or update a relationship in Memgraph."""
|
|
132
|
+
# Sanitize relationship type for Cypher (remove special characters)
|
|
133
|
+
rel_type = (
|
|
134
|
+
relationship.relation_type.replace("-", "_").replace(" ", "_").upper()
|
|
135
|
+
)
|
|
136
|
+
|
|
137
|
+
query = f"""
|
|
138
|
+
MATCH (source:Entity {{id: $source_id}})
|
|
139
|
+
MATCH (target:Entity {{id: $target_id}})
|
|
140
|
+
MERGE (source)-[r:{rel_type}]->(target)
|
|
141
|
+
SET r.id = $id,
|
|
142
|
+
r.properties = $properties,
|
|
143
|
+
r.confidence = $confidence,
|
|
144
|
+
r.created_at = $created_at
|
|
145
|
+
RETURN r.id as id
|
|
146
|
+
"""
|
|
147
|
+
result = self._run_query(
|
|
148
|
+
query,
|
|
149
|
+
{
|
|
150
|
+
"id": relationship.id,
|
|
151
|
+
"source_id": relationship.source_id,
|
|
152
|
+
"target_id": relationship.target_id,
|
|
153
|
+
"properties": json.dumps(relationship.properties),
|
|
154
|
+
"confidence": relationship.confidence,
|
|
155
|
+
"created_at": relationship.created_at.isoformat(),
|
|
156
|
+
},
|
|
157
|
+
)
|
|
158
|
+
return result[0]["id"] if result else relationship.id
|
|
159
|
+
|
|
160
|
+
def get_entity(self, entity_id: str) -> Optional[Entity]:
|
|
161
|
+
"""Get an entity by ID."""
|
|
162
|
+
query = """
|
|
163
|
+
MATCH (e:Entity {id: $id})
|
|
164
|
+
RETURN e.id as id, e.name as name, e.entity_type as entity_type,
|
|
165
|
+
e.properties as properties, e.created_at as created_at,
|
|
166
|
+
e.project_id as project_id, e.agent as agent
|
|
167
|
+
"""
|
|
168
|
+
result = self._run_query(query, {"id": entity_id})
|
|
169
|
+
if not result:
|
|
170
|
+
return None
|
|
171
|
+
|
|
172
|
+
r = result[0]
|
|
173
|
+
properties = json.loads(r["properties"]) if r["properties"] else {}
|
|
174
|
+
|
|
175
|
+
# Add project_id and agent back to properties if present
|
|
176
|
+
if r.get("project_id"):
|
|
177
|
+
properties["project_id"] = r["project_id"]
|
|
178
|
+
if r.get("agent"):
|
|
179
|
+
properties["agent"] = r["agent"]
|
|
180
|
+
|
|
181
|
+
return Entity(
|
|
182
|
+
id=r["id"],
|
|
183
|
+
name=r["name"],
|
|
184
|
+
entity_type=r["entity_type"],
|
|
185
|
+
properties=properties,
|
|
186
|
+
created_at=(
|
|
187
|
+
datetime.fromisoformat(r["created_at"])
|
|
188
|
+
if r["created_at"]
|
|
189
|
+
else datetime.now(timezone.utc)
|
|
190
|
+
),
|
|
191
|
+
)
|
|
192
|
+
|
|
193
|
+
def get_entities(
|
|
194
|
+
self,
|
|
195
|
+
entity_type: Optional[str] = None,
|
|
196
|
+
project_id: Optional[str] = None,
|
|
197
|
+
agent: Optional[str] = None,
|
|
198
|
+
limit: int = 100,
|
|
199
|
+
) -> List[Entity]:
|
|
200
|
+
"""Get entities with optional filtering."""
|
|
201
|
+
conditions = []
|
|
202
|
+
params: Dict[str, Any] = {"limit": limit}
|
|
203
|
+
|
|
204
|
+
if entity_type:
|
|
205
|
+
conditions.append("e.entity_type = $entity_type")
|
|
206
|
+
params["entity_type"] = entity_type
|
|
207
|
+
if project_id:
|
|
208
|
+
conditions.append("e.project_id = $project_id")
|
|
209
|
+
params["project_id"] = project_id
|
|
210
|
+
if agent:
|
|
211
|
+
conditions.append("e.agent = $agent")
|
|
212
|
+
params["agent"] = agent
|
|
213
|
+
|
|
214
|
+
where_clause = f"WHERE {' AND '.join(conditions)}" if conditions else ""
|
|
215
|
+
|
|
216
|
+
query = f"""
|
|
217
|
+
MATCH (e:Entity)
|
|
218
|
+
{where_clause}
|
|
219
|
+
RETURN e.id as id, e.name as name, e.entity_type as entity_type,
|
|
220
|
+
e.properties as properties, e.created_at as created_at,
|
|
221
|
+
e.project_id as project_id, e.agent as agent
|
|
222
|
+
LIMIT $limit
|
|
223
|
+
"""
|
|
224
|
+
|
|
225
|
+
results = self._run_query(query, params)
|
|
226
|
+
entities = []
|
|
227
|
+
for r in results:
|
|
228
|
+
properties = json.loads(r["properties"]) if r["properties"] else {}
|
|
229
|
+
if r.get("project_id"):
|
|
230
|
+
properties["project_id"] = r["project_id"]
|
|
231
|
+
if r.get("agent"):
|
|
232
|
+
properties["agent"] = r["agent"]
|
|
233
|
+
|
|
234
|
+
entities.append(
|
|
235
|
+
Entity(
|
|
236
|
+
id=r["id"],
|
|
237
|
+
name=r["name"],
|
|
238
|
+
entity_type=r["entity_type"],
|
|
239
|
+
properties=properties,
|
|
240
|
+
created_at=(
|
|
241
|
+
datetime.fromisoformat(r["created_at"])
|
|
242
|
+
if r["created_at"]
|
|
243
|
+
else datetime.now(timezone.utc)
|
|
244
|
+
),
|
|
245
|
+
)
|
|
246
|
+
)
|
|
247
|
+
return entities
|
|
248
|
+
|
|
249
|
+
def get_relationships(self, entity_id: str) -> List[Relationship]:
|
|
250
|
+
"""Get all relationships for an entity (both directions)."""
|
|
251
|
+
query = """
|
|
252
|
+
MATCH (e:Entity {id: $entity_id})-[r]-(other:Entity)
|
|
253
|
+
RETURN r.id as id,
|
|
254
|
+
CASE WHEN startNode(r).id = $entity_id THEN e.id ELSE other.id END as source_id,
|
|
255
|
+
CASE WHEN endNode(r).id = $entity_id THEN e.id ELSE other.id END as target_id,
|
|
256
|
+
type(r) as relation_type, r.properties as properties,
|
|
257
|
+
r.confidence as confidence, r.created_at as created_at
|
|
258
|
+
"""
|
|
259
|
+
|
|
260
|
+
results = self._run_query(query, {"entity_id": entity_id})
|
|
261
|
+
relationships = []
|
|
262
|
+
for r in results:
|
|
263
|
+
rel_id = (
|
|
264
|
+
r["id"] or f"{r['source_id']}-{r['relation_type']}-{r['target_id']}"
|
|
265
|
+
)
|
|
266
|
+
relationships.append(
|
|
267
|
+
Relationship(
|
|
268
|
+
id=rel_id,
|
|
269
|
+
source_id=r["source_id"],
|
|
270
|
+
target_id=r["target_id"],
|
|
271
|
+
relation_type=r["relation_type"],
|
|
272
|
+
properties=json.loads(r["properties"]) if r["properties"] else {},
|
|
273
|
+
confidence=r["confidence"] or 1.0,
|
|
274
|
+
created_at=(
|
|
275
|
+
datetime.fromisoformat(r["created_at"])
|
|
276
|
+
if r["created_at"]
|
|
277
|
+
else datetime.now(timezone.utc)
|
|
278
|
+
),
|
|
279
|
+
)
|
|
280
|
+
)
|
|
281
|
+
return relationships
|
|
282
|
+
|
|
283
|
+
def search_entities(
|
|
284
|
+
self,
|
|
285
|
+
query: str,
|
|
286
|
+
embedding: Optional[List[float]] = None,
|
|
287
|
+
top_k: int = 10,
|
|
288
|
+
) -> List[Entity]:
|
|
289
|
+
"""
|
|
290
|
+
Search for entities by name.
|
|
291
|
+
|
|
292
|
+
Note: Vector similarity search requires Memgraph MAGE with vector operations.
|
|
293
|
+
Falls back to text search if embedding is provided but vector index
|
|
294
|
+
is not available.
|
|
295
|
+
"""
|
|
296
|
+
# For now, we do text-based search
|
|
297
|
+
# Vector search can be added when Memgraph MAGE is configured
|
|
298
|
+
cypher = """
|
|
299
|
+
MATCH (e:Entity)
|
|
300
|
+
WHERE toLower(e.name) CONTAINS toLower($query)
|
|
301
|
+
RETURN e.id as id, e.name as name, e.entity_type as entity_type,
|
|
302
|
+
e.properties as properties, e.created_at as created_at,
|
|
303
|
+
e.project_id as project_id, e.agent as agent
|
|
304
|
+
LIMIT $limit
|
|
305
|
+
"""
|
|
306
|
+
|
|
307
|
+
results = self._run_query(cypher, {"query": query, "limit": top_k})
|
|
308
|
+
entities = []
|
|
309
|
+
for r in results:
|
|
310
|
+
properties = json.loads(r["properties"]) if r["properties"] else {}
|
|
311
|
+
if r.get("project_id"):
|
|
312
|
+
properties["project_id"] = r["project_id"]
|
|
313
|
+
if r.get("agent"):
|
|
314
|
+
properties["agent"] = r["agent"]
|
|
315
|
+
|
|
316
|
+
entities.append(
|
|
317
|
+
Entity(
|
|
318
|
+
id=r["id"],
|
|
319
|
+
name=r["name"],
|
|
320
|
+
entity_type=r["entity_type"],
|
|
321
|
+
properties=properties,
|
|
322
|
+
created_at=(
|
|
323
|
+
datetime.fromisoformat(r["created_at"])
|
|
324
|
+
if r["created_at"]
|
|
325
|
+
else datetime.now(timezone.utc)
|
|
326
|
+
),
|
|
327
|
+
)
|
|
328
|
+
)
|
|
329
|
+
return entities
|
|
330
|
+
|
|
331
|
+
def delete_entity(self, entity_id: str) -> bool:
|
|
332
|
+
"""Delete an entity and its relationships."""
|
|
333
|
+
query = """
|
|
334
|
+
MATCH (e:Entity {id: $id})
|
|
335
|
+
DETACH DELETE e
|
|
336
|
+
RETURN count(e) as deleted
|
|
337
|
+
"""
|
|
338
|
+
result = self._run_query(query, {"id": entity_id})
|
|
339
|
+
return result[0]["deleted"] > 0 if result else False
|
|
340
|
+
|
|
341
|
+
def delete_relationship(self, relationship_id: str) -> bool:
|
|
342
|
+
"""Delete a specific relationship by ID."""
|
|
343
|
+
query = """
|
|
344
|
+
MATCH ()-[r]-()
|
|
345
|
+
WHERE r.id = $id
|
|
346
|
+
DELETE r
|
|
347
|
+
RETURN count(r) as deleted
|
|
348
|
+
"""
|
|
349
|
+
result = self._run_query(query, {"id": relationship_id})
|
|
350
|
+
return result[0]["deleted"] > 0 if result else False
|
|
351
|
+
|
|
352
|
+
def close(self) -> None:
|
|
353
|
+
"""Close the Memgraph driver connection."""
|
|
354
|
+
if self._driver:
|
|
355
|
+
self._driver.close()
|
|
356
|
+
self._driver = None
|
|
357
|
+
|
|
358
|
+
# Additional methods for compatibility with existing GraphStore API
|
|
359
|
+
|
|
360
|
+
def find_entities(
|
|
361
|
+
self,
|
|
362
|
+
name: Optional[str] = None,
|
|
363
|
+
entity_type: Optional[str] = None,
|
|
364
|
+
limit: int = 10,
|
|
365
|
+
) -> List[Entity]:
|
|
366
|
+
"""
|
|
367
|
+
Find entities by name or type.
|
|
368
|
+
|
|
369
|
+
This method provides compatibility with the existing GraphStore API.
|
|
370
|
+
"""
|
|
371
|
+
if name:
|
|
372
|
+
return self.search_entities(query=name, top_k=limit)
|
|
373
|
+
|
|
374
|
+
return self.get_entities(entity_type=entity_type, limit=limit)
|
|
375
|
+
|
|
376
|
+
def get_relationships_directional(
|
|
377
|
+
self,
|
|
378
|
+
entity_id: str,
|
|
379
|
+
direction: str = "both",
|
|
380
|
+
relation_type: Optional[str] = None,
|
|
381
|
+
) -> List[Relationship]:
|
|
382
|
+
"""
|
|
383
|
+
Get relationships for an entity with direction control.
|
|
384
|
+
|
|
385
|
+
This method provides compatibility with the existing GraphStore API.
|
|
386
|
+
|
|
387
|
+
Args:
|
|
388
|
+
entity_id: The entity ID.
|
|
389
|
+
direction: "outgoing", "incoming", or "both".
|
|
390
|
+
relation_type: Optional filter by relationship type.
|
|
391
|
+
|
|
392
|
+
Returns:
|
|
393
|
+
List of matching relationships.
|
|
394
|
+
"""
|
|
395
|
+
if direction == "outgoing":
|
|
396
|
+
pattern = "(e)-[r]->(other)"
|
|
397
|
+
elif direction == "incoming":
|
|
398
|
+
pattern = "(e)<-[r]-(other)"
|
|
399
|
+
else:
|
|
400
|
+
pattern = "(e)-[r]-(other)"
|
|
401
|
+
|
|
402
|
+
type_filter = f":{relation_type}" if relation_type else ""
|
|
403
|
+
|
|
404
|
+
query = f"""
|
|
405
|
+
MATCH (e:Entity {{id: $entity_id}}){pattern.replace("[r]", f"[r{type_filter}]")}
|
|
406
|
+
RETURN r.id as id, e.id as source_id, other.id as target_id,
|
|
407
|
+
type(r) as relation_type, r.properties as properties,
|
|
408
|
+
r.confidence as confidence, r.created_at as created_at
|
|
409
|
+
"""
|
|
410
|
+
|
|
411
|
+
results = self._run_query(query, {"entity_id": entity_id})
|
|
412
|
+
relationships = []
|
|
413
|
+
for r in results:
|
|
414
|
+
rel_id = (
|
|
415
|
+
r["id"] or f"{r['source_id']}-{r['relation_type']}-{r['target_id']}"
|
|
416
|
+
)
|
|
417
|
+
relationships.append(
|
|
418
|
+
Relationship(
|
|
419
|
+
id=rel_id,
|
|
420
|
+
source_id=r["source_id"],
|
|
421
|
+
target_id=r["target_id"],
|
|
422
|
+
relation_type=r["relation_type"],
|
|
423
|
+
properties=json.loads(r["properties"]) if r["properties"] else {},
|
|
424
|
+
confidence=r["confidence"] or 1.0,
|
|
425
|
+
created_at=(
|
|
426
|
+
datetime.fromisoformat(r["created_at"])
|
|
427
|
+
if r["created_at"]
|
|
428
|
+
else datetime.now(timezone.utc)
|
|
429
|
+
),
|
|
430
|
+
)
|
|
431
|
+
)
|
|
432
|
+
return relationships
|