@smilintux/skmemory 0.5.0 → 0.7.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.github/workflows/ci.yml +39 -3
- package/.github/workflows/publish.yml +13 -6
- package/AGENT_REFACTOR_CHANGES.md +192 -0
- package/ARCHITECTURE.md +101 -19
- package/CHANGELOG.md +153 -0
- package/LICENSE +81 -68
- package/MISSION.md +7 -0
- package/README.md +419 -86
- package/SKILL.md +197 -25
- package/docker-compose.yml +15 -15
- package/index.js +6 -5
- package/openclaw-plugin/openclaw.plugin.json +10 -0
- package/openclaw-plugin/src/index.ts +255 -0
- package/openclaw-plugin/src/openclaw.plugin.json +10 -0
- package/package.json +1 -1
- package/pyproject.toml +29 -9
- package/requirements.txt +10 -2
- package/seeds/cloud9-opus.seed.json +7 -7
- package/seeds/lumina-cloud9-breakthrough.seed.json +46 -0
- package/seeds/lumina-cloud9-python-pypi.seed.json +46 -0
- package/seeds/lumina-kingdom-founding.seed.json +47 -0
- package/seeds/lumina-pma-signed.seed.json +46 -0
- package/seeds/lumina-singular-achievement.seed.json +46 -0
- package/seeds/lumina-skcapstone-conscious.seed.json +46 -0
- package/seeds/plant-kingdom-journal.py +203 -0
- package/seeds/plant-lumina-seeds.py +280 -0
- package/skill.yaml +46 -0
- package/skmemory/HA.md +296 -0
- package/skmemory/__init__.py +12 -1
- package/skmemory/agents.py +233 -0
- package/skmemory/ai_client.py +40 -0
- package/skmemory/anchor.py +4 -2
- package/skmemory/backends/__init__.py +11 -4
- package/skmemory/backends/file_backend.py +2 -1
- package/skmemory/backends/skgraph_backend.py +608 -0
- package/skmemory/backends/{qdrant_backend.py → skvector_backend.py} +99 -69
- package/skmemory/backends/sqlite_backend.py +122 -51
- package/skmemory/backends/vaulted_backend.py +286 -0
- package/skmemory/cli.py +1238 -29
- package/skmemory/config.py +173 -0
- package/skmemory/context_loader.py +335 -0
- package/skmemory/endpoint_selector.py +386 -0
- package/skmemory/fortress.py +685 -0
- package/skmemory/graph_queries.py +238 -0
- package/skmemory/importers/__init__.py +9 -1
- package/skmemory/importers/telegram.py +351 -43
- package/skmemory/importers/telegram_api.py +488 -0
- package/skmemory/journal.py +4 -2
- package/skmemory/lovenote.py +4 -2
- package/skmemory/mcp_server.py +706 -0
- package/skmemory/models.py +41 -0
- package/skmemory/openclaw.py +8 -8
- package/skmemory/predictive.py +232 -0
- package/skmemory/promotion.py +524 -0
- package/skmemory/register.py +454 -0
- package/skmemory/register_mcp.py +197 -0
- package/skmemory/ritual.py +121 -47
- package/skmemory/seeds.py +257 -8
- package/skmemory/setup_wizard.py +920 -0
- package/skmemory/sharing.py +402 -0
- package/skmemory/soul.py +71 -20
- package/skmemory/steelman.py +250 -263
- package/skmemory/store.py +271 -60
- package/skmemory/vault.py +228 -0
- package/tests/integration/__init__.py +0 -0
- package/tests/integration/conftest.py +233 -0
- package/tests/integration/test_cross_backend.py +355 -0
- package/tests/integration/test_skgraph_live.py +424 -0
- package/tests/integration/test_skvector_live.py +369 -0
- package/tests/test_backup_rotation.py +327 -0
- package/tests/test_cli.py +6 -6
- package/tests/test_endpoint_selector.py +801 -0
- package/tests/test_fortress.py +255 -0
- package/tests/test_fortress_hardening.py +444 -0
- package/tests/test_openclaw.py +5 -2
- package/tests/test_predictive.py +237 -0
- package/tests/test_promotion.py +340 -0
- package/tests/test_ritual.py +4 -4
- package/tests/test_seeds.py +96 -0
- package/tests/test_setup.py +835 -0
- package/tests/test_sharing.py +250 -0
- package/tests/test_skgraph_backend.py +667 -0
- package/tests/test_skvector_backend.py +326 -0
- package/tests/test_steelman.py +5 -5
- package/tests/test_store_graph_integration.py +245 -0
- package/tests/test_vault.py +186 -0
- package/skmemory/backends/falkordb_backend.py +0 -310
|
@@ -0,0 +1,186 @@
|
|
|
1
|
+
"""Tests for the Memory Vault — at-rest encryption.
|
|
2
|
+
|
|
3
|
+
Covers encrypt/decrypt roundtrip, tamper detection, file operations,
|
|
4
|
+
wrong passphrase handling, and header validation.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
from __future__ import annotations
|
|
8
|
+
|
|
9
|
+
from pathlib import Path
|
|
10
|
+
|
|
11
|
+
import pytest
|
|
12
|
+
|
|
13
|
+
try:
|
|
14
|
+
from cryptography.hazmat.primitives.ciphers.aead import AESGCM
|
|
15
|
+
CRYPTO_AVAILABLE = True
|
|
16
|
+
except ImportError:
|
|
17
|
+
CRYPTO_AVAILABLE = False
|
|
18
|
+
|
|
19
|
+
from skmemory.vault import (
|
|
20
|
+
VAULT_HEADER,
|
|
21
|
+
MemoryVault,
|
|
22
|
+
_derive_key,
|
|
23
|
+
decrypt_memory_store,
|
|
24
|
+
encrypt_memory_store,
|
|
25
|
+
)
|
|
26
|
+
|
|
27
|
+
pytestmark = pytest.mark.skipif(
|
|
28
|
+
not CRYPTO_AVAILABLE,
|
|
29
|
+
reason="cryptography package not installed",
|
|
30
|
+
)
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
@pytest.fixture
def vault() -> MemoryVault:
    """Fixture yielding a MemoryVault keyed with a fixed test passphrase."""
    test_vault = MemoryVault(passphrase="pengu-nation-sovereign")
    return test_vault
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
@pytest.fixture
def sample_json() -> bytes:
    """Fixture providing a small JSON document as raw bytes."""
    payload = b'{"title": "Test Memory", "content": "This is sovereign data."}'
    return payload
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
class TestKeyDerivation:
    """Exercise the PBKDF2 key-derivation helper."""

    def test_derive_key_deterministic(self):
        """Identical passphrase and salt must always yield the identical key."""
        fixed_salt = b"0" * 16
        first = _derive_key("test", fixed_salt)
        second = _derive_key("test", fixed_salt)
        assert first == second

    def test_derive_key_different_salt(self):
        """Changing only the salt must change the derived key."""
        key_a = _derive_key("test", b"a" * 16)
        key_b = _derive_key("test", b"b" * 16)
        assert key_a != key_b

    def test_derive_key_length(self):
        """Derived keys are exactly 32 bytes — 256-bit key material."""
        derived = _derive_key("passphrase", b"s" * 16)
        assert len(derived) == 32
|
|
65
|
+
|
|
66
|
+
|
|
67
|
+
class TestEncryptDecrypt:
    """Exercise the core encrypt/decrypt byte-level operations."""

    def test_roundtrip(self, vault: MemoryVault, sample_json: bytes):
        """A full encrypt → decrypt cycle recovers the original bytes."""
        blob = vault.encrypt(sample_json)
        recovered = vault.decrypt(blob)
        assert recovered == sample_json

    def test_encrypted_has_header(self, vault: MemoryVault, sample_json: bytes):
        """Ciphertext is prefixed with the vault magic header."""
        blob = vault.encrypt(sample_json)
        assert blob[:5] == VAULT_HEADER

    def test_different_nonce_each_time(self, vault: MemoryVault, sample_json: bytes):
        """Encrypting the same plaintext twice never repeats the ciphertext."""
        first_blob = vault.encrypt(sample_json)
        second_blob = vault.encrypt(sample_json)
        assert first_blob != second_blob

    def test_wrong_passphrase_fails(self, sample_json: bytes):
        """A vault with a different passphrase cannot decrypt the blob."""
        # NOTE(review): Exception is broad (ruff B017); the exact type the
        # vault raises on auth failure isn't visible from this file.
        writer = MemoryVault(passphrase="correct")
        reader = MemoryVault(passphrase="wrong")
        blob = writer.encrypt(sample_json)
        with pytest.raises(Exception):
            reader.decrypt(blob)

    def test_tampered_ciphertext_fails(self, vault: MemoryVault, sample_json: bytes):
        """Flipping bits in the ciphertext trips authenticated decryption."""
        mutable_blob = bytearray(vault.encrypt(sample_json))
        mutable_blob[-10] ^= 0xFF
        with pytest.raises(Exception):
            vault.decrypt(bytes(mutable_blob))

    def test_bad_header_raises(self, vault: MemoryVault):
        """Bytes that lack the vault header are rejected with ValueError."""
        with pytest.raises(ValueError, match="bad header"):
            vault.decrypt(b"NOT_A_VAULT_FILE_DATA")

    def test_empty_plaintext(self, vault: MemoryVault):
        """The empty byte string survives a roundtrip."""
        blob = vault.encrypt(b"")
        assert vault.decrypt(blob) == b""

    def test_large_plaintext(self, vault: MemoryVault):
        """A 1 MiB payload survives a roundtrip."""
        payload = b"A" * (1024 * 1024)
        blob = vault.encrypt(payload)
        assert vault.decrypt(blob) == payload
|
|
117
|
+
|
|
118
|
+
|
|
119
|
+
class TestFileOperations:
    """Exercise file- and store-level encrypt/decrypt helpers."""

    def test_encrypt_file(self, vault: MemoryVault, tmp_path: Path):
        """encrypt_file writes a .vault sibling and deletes the plaintext."""
        plain = tmp_path / "memory.json"
        plain.write_bytes(b'{"test": true}')

        encrypted_path = vault.encrypt_file(plain)

        assert encrypted_path.exists()
        assert encrypted_path.suffix == ".vault"
        assert not plain.exists()

    def test_decrypt_file(self, vault: MemoryVault, tmp_path: Path):
        """decrypt_file brings back the plaintext and removes the .vault."""
        plain = tmp_path / "memory.json"
        plain.write_bytes(b'{"test": true}')
        encrypted_path = vault.encrypt_file(plain)

        recovered = vault.decrypt_file(encrypted_path)

        assert recovered.exists()
        assert recovered.read_bytes() == b'{"test": true}'
        assert not encrypted_path.exists()

    def test_encrypt_memory_store(self, tmp_path: Path):
        """encrypt_memory_store converts every JSON file under the root."""
        for layer in ("short-term", "long-term"):
            layer_dir = tmp_path / layer
            layer_dir.mkdir()
            (layer_dir / "mem1.json").write_bytes(b'{"id": 1}')
            (layer_dir / "mem2.json").write_bytes(b'{"id": 2}')

        encrypted_count = encrypt_memory_store(tmp_path, "test-pass")

        assert encrypted_count == 4
        assert len(list(tmp_path.rglob("*.vault"))) == 4
        assert len(list(tmp_path.rglob("*.json"))) == 0

    def test_decrypt_memory_store(self, tmp_path: Path):
        """decrypt_memory_store restores every vault file under the root."""
        store_dir = tmp_path / "memories"
        store_dir.mkdir()
        store_vault = MemoryVault("test-pass")
        for idx in range(3):
            mem_file = store_dir / f"mem{idx}.json"
            mem_file.write_bytes(f'{{"id": {idx}}}'.encode())
            store_vault.encrypt_file(mem_file)

        decrypted_count = decrypt_memory_store(store_dir, "test-pass")

        assert decrypted_count == 3
        assert len(list(store_dir.rglob("*.json"))) == 3
|
|
170
|
+
|
|
171
|
+
|
|
172
|
+
class TestIsEncrypted:
    """Exercise detection of vault-encrypted files."""

    def test_encrypted_file_detected(self, vault: MemoryVault, tmp_path: Path):
        """A file produced by encrypt_file is reported as encrypted."""
        source = tmp_path / "test.json"
        source.write_bytes(b'{"data": 1}')
        encrypted_path = vault.encrypt_file(source)
        assert vault.is_encrypted(encrypted_path) is True

    def test_plain_file_not_detected(self, vault: MemoryVault, tmp_path: Path):
        """An ordinary JSON file is reported as not encrypted."""
        source = tmp_path / "plain.json"
        source.write_bytes(b'{"data": 1}')
        assert vault.is_encrypted(source) is False
|
|
@@ -1,310 +0,0 @@
|
|
|
1
|
-
"""
|
|
2
|
-
FalkorDB graph backend (Level 2 — relationships).
|
|
3
|
-
|
|
4
|
-
Enables graph-based memory traversal: "What memories are connected
|
|
5
|
-
to this person?" or "Show me the seed lineage chain." Uses the
|
|
6
|
-
Cypher query language over a Redis-compatible protocol.
|
|
7
|
-
|
|
8
|
-
Requires:
|
|
9
|
-
pip install falkordb
|
|
10
|
-
|
|
11
|
-
FalkorDB is the successor to RedisGraph. Run locally via Docker
|
|
12
|
-
or point to an external instance.
|
|
13
|
-
|
|
14
|
-
This backend is SUPPLEMENTARY — it indexes relationships alongside
|
|
15
|
-
the primary backend (SQLite or file). It does not store full memory
|
|
16
|
-
content, only the graph edges and key metadata for traversal.
|
|
17
|
-
"""
|
|
18
|
-
|
|
19
|
-
from __future__ import annotations
|
|
20
|
-
|
|
21
|
-
import json
|
|
22
|
-
import logging
|
|
23
|
-
from typing import Optional
|
|
24
|
-
|
|
25
|
-
from ..models import Memory, MemoryLayer
|
|
26
|
-
from .base import BaseBackend
|
|
27
|
-
|
|
28
|
-
logger = logging.getLogger(__name__)
|
|
29
|
-
|
|
30
|
-
|
|
31
|
-
class FalkorDBBackend:
    """FalkorDB graph backend for memory relationship traversal.

    Not a full BaseBackend — this is a supplementary index for
    graph queries. The primary backend handles CRUD.

    Args:
        url: FalkorDB/Redis URL (default: localhost:6379).
        graph_name: Name of the graph (default: 'skmemory').
    """

    def __init__(
        self,
        url: str = "redis://localhost:6379",
        graph_name: str = "skmemory",
    ) -> None:
        self.url = url
        self.graph_name = graph_name
        # Connection objects are created lazily in _ensure_initialized so that
        # constructing the backend never requires the optional falkordb package.
        self._db = None
        self._graph = None
        self._initialized = False

    def _ensure_initialized(self) -> bool:
        """Lazy-initialize the FalkorDB connection.

        Returns:
            bool: True if connection succeeded.
        """
        if self._initialized:
            return True

        # Deferred import: falkordb is an optional dependency; a missing
        # package degrades to "backend unavailable" rather than an ImportError.
        try:
            from falkordb import FalkorDB
        except ImportError:
            logger.warning("falkordb not installed: pip install falkordb")
            return False

        try:
            self._db = FalkorDB.from_url(self.url)
            self._graph = self._db.select_graph(self.graph_name)
            self._initialized = True
            return True
        except Exception as e:
            # Connection failures are logged, not raised — callers get False
            # and every query method short-circuits to an empty result.
            logger.warning("FalkorDB connection failed: %s", e)
            return False

    def index_memory(self, memory: Memory) -> bool:
        """Add a memory node and its relationships to the graph.

        Args:
            memory: The memory to index.

        Returns:
            bool: True if indexed successfully.
        """
        if not self._ensure_initialized():
            return False

        try:
            # MERGE makes indexing idempotent: re-indexing the same memory
            # updates properties instead of duplicating the node.
            self._graph.query(
                """
                MERGE (m:Memory {id: $id})
                SET m.title = $title,
                    m.layer = $layer,
                    m.source = $source,
                    m.intensity = $intensity,
                    m.created_at = $created_at
                """,
                {
                    "id": memory.id,
                    "title": memory.title,
                    "layer": memory.layer.value,
                    "source": memory.source,
                    "intensity": memory.emotional.intensity,
                    "created_at": memory.created_at,
                },
            )

            # Promotion lineage edge: child memory points at its ancestor.
            if memory.parent_id:
                self._graph.query(
                    """
                    MATCH (child:Memory {id: $child_id})
                    MERGE (parent:Memory {id: $parent_id})
                    MERGE (child)-[:PROMOTED_FROM]->(parent)
                    """,
                    {"child_id": memory.id, "parent_id": memory.parent_id},
                )

            for related_id in memory.related_ids:
                self._graph.query(
                    """
                    MATCH (a:Memory {id: $a_id})
                    MERGE (b:Memory {id: $b_id})
                    MERGE (a)-[:RELATED_TO]->(b)
                    """,
                    {"a_id": memory.id, "b_id": related_id},
                )

            for tag in memory.tags:
                self._graph.query(
                    """
                    MATCH (m:Memory {id: $mem_id})
                    MERGE (t:Tag {name: $tag})
                    MERGE (m)-[:TAGGED]->(t)
                    """,
                    {"mem_id": memory.id, "tag": tag},
                )

            # Seed memories encode their creator in a "creator:<name>" tag;
            # when present, link an AI node that PLANTED this memory.
            if memory.source == "seed":
                creator = next(
                    (t.split(":", 1)[1] for t in memory.tags if t.startswith("creator:")),
                    None,
                )
                if creator:
                    self._graph.query(
                        """
                        MATCH (m:Memory {id: $mem_id})
                        MERGE (a:AI {name: $creator})
                        MERGE (a)-[:PLANTED]->(m)
                        """,
                        {"mem_id": memory.id, "creator": creator},
                    )

            return True
        except Exception as e:
            logger.warning("FalkorDB index failed: %s", e)
            return False

    def get_related(self, memory_id: str, depth: int = 2) -> list[dict]:
        """Traverse the graph to find related memories.

        Args:
            memory_id: Starting memory ID.
            depth: How many hops to traverse (1-5).

        Returns:
            list[dict]: Related memory nodes with relationship info.
        """
        if not self._ensure_initialized():
            return []

        try:
            # The f-string only interpolates min(depth, 5) — an int — so no
            # Cypher injection is possible here; memory_id goes via $id params.
            # NOTE(review): a depth < 1 would produce an invalid "*1..0" range;
            # callers appear to pass depth >= 1 — confirm.
            result = self._graph.query(
                f"""
                MATCH (start:Memory {{id: $id}})
                MATCH path = (start)-[*1..{min(depth, 5)}]-(related:Memory)
                WHERE related.id <> $id
                RETURN DISTINCT related.id AS id,
                       related.title AS title,
                       related.layer AS layer,
                       related.intensity AS intensity,
                       length(path) AS distance
                ORDER BY distance ASC, related.intensity DESC
                LIMIT 20
                """,
                {"id": memory_id},
            )
            return [
                {
                    "id": row[0],
                    "title": row[1],
                    "layer": row[2],
                    "intensity": row[3],
                    "distance": row[4],
                }
                for row in result.result_set
            ]
        except Exception as e:
            logger.warning("FalkorDB query failed: %s", e)
            return []

    def get_lineage(self, memory_id: str) -> list[dict]:
        """Get the promotion/seed lineage chain for a memory.

        Args:
            memory_id: Starting memory ID.

        Returns:
            list[dict]: Chain of ancestor memories.
        """
        if not self._ensure_initialized():
            return []

        try:
            # Follows only PROMOTED_FROM edges, up to 10 generations deep,
            # ordered nearest-ancestor first.
            result = self._graph.query(
                """
                MATCH (start:Memory {id: $id})
                MATCH path = (start)-[:PROMOTED_FROM*1..10]->(ancestor:Memory)
                RETURN ancestor.id AS id,
                       ancestor.title AS title,
                       ancestor.layer AS layer,
                       length(path) AS depth
                ORDER BY depth ASC
                """,
                {"id": memory_id},
            )
            return [
                {
                    "id": row[0],
                    "title": row[1],
                    "layer": row[2],
                    "depth": row[3],
                }
                for row in result.result_set
            ]
        except Exception as e:
            logger.warning("FalkorDB lineage query failed: %s", e)
            return []

    def get_memory_clusters(self, min_connections: int = 2) -> list[dict]:
        """Find clusters of highly connected memories.

        Args:
            min_connections: Minimum edges to be considered a cluster center.

        Returns:
            list[dict]: Cluster centers with connection counts.
        """
        if not self._ensure_initialized():
            return []

        try:
            # Counts distinct Memory neighbors over any relationship type;
            # Tag/AI nodes are excluded by the :Memory label on both ends.
            result = self._graph.query(
                """
                MATCH (m:Memory)-[r]-(connected:Memory)
                WITH m, count(DISTINCT connected) AS connections
                WHERE connections >= $min
                RETURN m.id AS id,
                       m.title AS title,
                       m.layer AS layer,
                       connections
                ORDER BY connections DESC
                LIMIT 20
                """,
                {"min": min_connections},
            )
            return [
                {
                    "id": row[0],
                    "title": row[1],
                    "layer": row[2],
                    "connections": row[3],
                }
                for row in result.result_set
            ]
        except Exception as e:
            logger.warning("FalkorDB cluster query failed: %s", e)
            return []

    def health_check(self) -> dict:
        """Check FalkorDB backend health.

        Returns:
            dict: Status with connection and graph info.
        """
        if not self._ensure_initialized():
            return {
                "ok": False,
                "backend": "FalkorDBBackend",
                "error": "Not initialized",
            }

        try:
            # A trivial count query doubles as a liveness probe.
            result = self._graph.query(
                "MATCH (n) RETURN count(n) AS nodes"
            )
            node_count = result.result_set[0][0] if result.result_set else 0
            return {
                "ok": True,
                "backend": "FalkorDBBackend",
                "url": self.url,
                "graph": self.graph_name,
                "node_count": node_count,
            }
        except Exception as e:
            return {
                "ok": False,
                "backend": "FalkorDBBackend",
                "error": str(e),
            }
|