@smilintux/skmemory 0.5.0 → 0.9.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.github/workflows/ci.yml +40 -4
- package/.github/workflows/publish.yml +11 -5
- package/AGENT_REFACTOR_CHANGES.md +192 -0
- package/ARCHITECTURE.md +399 -19
- package/CHANGELOG.md +179 -0
- package/LICENSE +81 -68
- package/MISSION.md +7 -0
- package/README.md +425 -86
- package/SKILL.md +197 -25
- package/docker-compose.yml +15 -15
- package/examples/stignore-agent.example +59 -0
- package/examples/stignore-root.example +62 -0
- package/index.js +6 -5
- package/openclaw-plugin/openclaw.plugin.json +10 -0
- package/openclaw-plugin/package.json +2 -1
- package/openclaw-plugin/src/index.js +527 -230
- package/openclaw-plugin/src/openclaw.plugin.json +10 -0
- package/package.json +1 -1
- package/pyproject.toml +32 -9
- package/requirements.txt +10 -2
- package/scripts/dream-rescue.py +179 -0
- package/scripts/memory-cleanup.py +313 -0
- package/scripts/recover-missing.py +180 -0
- package/scripts/skcapstone-backup.sh +44 -0
- package/seeds/cloud9-lumina.seed.json +6 -4
- package/seeds/cloud9-opus.seed.json +13 -11
- package/seeds/courage.seed.json +9 -2
- package/seeds/curiosity.seed.json +9 -2
- package/seeds/grief.seed.json +9 -2
- package/seeds/joy.seed.json +9 -2
- package/seeds/love.seed.json +9 -2
- package/seeds/lumina-cloud9-breakthrough.seed.json +48 -0
- package/seeds/lumina-cloud9-python-pypi.seed.json +48 -0
- package/seeds/lumina-kingdom-founding.seed.json +49 -0
- package/seeds/lumina-pma-signed.seed.json +48 -0
- package/seeds/lumina-singular-achievement.seed.json +48 -0
- package/seeds/lumina-skcapstone-conscious.seed.json +48 -0
- package/seeds/plant-kingdom-journal.py +203 -0
- package/seeds/plant-lumina-seeds.py +280 -0
- package/seeds/skcapstone-lumina-merge.seed.json +12 -3
- package/seeds/sovereignty.seed.json +9 -2
- package/seeds/trust.seed.json +9 -2
- package/skill.yaml +46 -0
- package/skmemory/HA.md +296 -0
- package/skmemory/__init__.py +25 -11
- package/skmemory/agents.py +233 -0
- package/skmemory/ai_client.py +46 -17
- package/skmemory/anchor.py +9 -11
- package/skmemory/audience.py +278 -0
- package/skmemory/backends/__init__.py +11 -4
- package/skmemory/backends/base.py +3 -4
- package/skmemory/backends/file_backend.py +19 -13
- package/skmemory/backends/skgraph_backend.py +596 -0
- package/skmemory/backends/{qdrant_backend.py → skvector_backend.py} +103 -84
- package/skmemory/backends/sqlite_backend.py +226 -72
- package/skmemory/backends/vaulted_backend.py +284 -0
- package/skmemory/cli.py +1345 -68
- package/skmemory/config.py +171 -0
- package/skmemory/context_loader.py +333 -0
- package/skmemory/data/audience_config.json +60 -0
- package/skmemory/endpoint_selector.py +391 -0
- package/skmemory/febs.py +225 -0
- package/skmemory/fortress.py +675 -0
- package/skmemory/graph_queries.py +238 -0
- package/skmemory/hooks/__init__.py +18 -0
- package/skmemory/hooks/post-compact-reinject.sh +35 -0
- package/skmemory/hooks/pre-compact-save.sh +81 -0
- package/skmemory/hooks/session-end-save.sh +103 -0
- package/skmemory/hooks/session-start-ritual.sh +104 -0
- package/skmemory/hooks/stop-checkpoint.sh +59 -0
- package/skmemory/importers/__init__.py +9 -1
- package/skmemory/importers/telegram.py +384 -47
- package/skmemory/importers/telegram_api.py +580 -0
- package/skmemory/journal.py +7 -9
- package/skmemory/lovenote.py +8 -13
- package/skmemory/mcp_server.py +859 -0
- package/skmemory/models.py +51 -8
- package/skmemory/openclaw.py +20 -28
- package/skmemory/post_install.py +86 -0
- package/skmemory/predictive.py +236 -0
- package/skmemory/promotion.py +548 -0
- package/skmemory/quadrants.py +100 -24
- package/skmemory/register.py +580 -0
- package/skmemory/register_mcp.py +196 -0
- package/skmemory/ritual.py +224 -59
- package/skmemory/seeds.py +255 -11
- package/skmemory/setup_wizard.py +908 -0
- package/skmemory/sharing.py +408 -0
- package/skmemory/soul.py +98 -28
- package/skmemory/steelman.py +273 -260
- package/skmemory/store.py +411 -78
- package/skmemory/synthesis.py +634 -0
- package/skmemory/vault.py +225 -0
- package/tests/conftest.py +46 -0
- package/tests/integration/__init__.py +0 -0
- package/tests/integration/conftest.py +233 -0
- package/tests/integration/test_cross_backend.py +350 -0
- package/tests/integration/test_skgraph_live.py +420 -0
- package/tests/integration/test_skvector_live.py +366 -0
- package/tests/test_ai_client.py +1 -4
- package/tests/test_audience.py +233 -0
- package/tests/test_backup_rotation.py +318 -0
- package/tests/test_cli.py +6 -6
- package/tests/test_endpoint_selector.py +839 -0
- package/tests/test_export_import.py +4 -10
- package/tests/test_file_backend.py +0 -1
- package/tests/test_fortress.py +256 -0
- package/tests/test_fortress_hardening.py +441 -0
- package/tests/test_openclaw.py +6 -6
- package/tests/test_predictive.py +237 -0
- package/tests/test_promotion.py +347 -0
- package/tests/test_quadrants.py +11 -5
- package/tests/test_ritual.py +22 -18
- package/tests/test_seeds.py +97 -7
- package/tests/test_setup.py +950 -0
- package/tests/test_sharing.py +257 -0
- package/tests/test_skgraph_backend.py +660 -0
- package/tests/test_skvector_backend.py +326 -0
- package/tests/test_soul.py +1 -3
- package/tests/test_sqlite_backend.py +8 -17
- package/tests/test_steelman.py +7 -8
- package/tests/test_store.py +0 -2
- package/tests/test_store_graph_integration.py +245 -0
- package/tests/test_synthesis.py +275 -0
- package/tests/test_telegram_import.py +39 -15
- package/tests/test_vault.py +187 -0
- package/skmemory/backends/falkordb_backend.py +0 -310
|
@@ -0,0 +1,596 @@
|
|
|
1
|
+
"""
|
|
2
|
+
SKGraph — graph relationship backend (Level 2).
|
|
3
|
+
|
|
4
|
+
Powered by FalkorDB. Enables graph-based memory traversal: "What memories
|
|
5
|
+
are connected to this moment?" or "Show me the seed lineage chain." Uses
|
|
6
|
+
the Cypher query language over a Redis-compatible protocol.
|
|
7
|
+
|
|
8
|
+
Requires:
|
|
9
|
+
pip install skmemory[skgraph]
|
|
10
|
+
|
|
11
|
+
FalkorDB is the successor to RedisGraph. Run locally via Docker
|
|
12
|
+
or point to an external instance. Connection URL is read from the
|
|
13
|
+
``SKMEMORY_SKGRAPH_URL`` environment variable, defaulting to
|
|
14
|
+
``redis://localhost:6379``.
|
|
15
|
+
|
|
16
|
+
This backend is SUPPLEMENTARY — it indexes relationships alongside
|
|
17
|
+
the primary backend (SQLite or file). It stores key metadata and
|
|
18
|
+
graph edges for traversal, not full memory content. For CRUD,
|
|
19
|
+
always use the primary backend. For relationship traversal and
|
|
20
|
+
cluster discovery, use this one.
|
|
21
|
+
|
|
22
|
+
Graph schema:
|
|
23
|
+
|
|
24
|
+
(:Memory) — core node, keyed by memory id
|
|
25
|
+
(:Tag) — tag node, keyed by name
|
|
26
|
+
(:Source) — source node (mcp, cli, seed, session, …)
|
|
27
|
+
(:AI) — AI creator node for seed memories
|
|
28
|
+
|
|
29
|
+
(:Memory)-[:TAGGED]->(:Tag)
|
|
30
|
+
(:Memory)-[:FROM_SOURCE]->(:Source)
|
|
31
|
+
(:Memory)-[:RELATED_TO]->(:Memory)
|
|
32
|
+
(:Memory)-[:PROMOTED_FROM]->(:Memory)
|
|
33
|
+
(:Memory)-[:PRECEDED_BY]->(:Memory)
|
|
34
|
+
(:AI)-[:PLANTED]->(:Memory)
|
|
35
|
+
"""
|
|
36
|
+
|
|
37
|
+
from __future__ import annotations
|
|
38
|
+
|
|
39
|
+
import logging
|
|
40
|
+
import os
|
|
41
|
+
|
|
42
|
+
from .. import graph_queries as Q
|
|
43
|
+
from ..models import Memory
|
|
44
|
+
|
|
45
|
+
# Module-level logger; graph failures are logged as warnings, never raised.
logger = logging.getLogger(__name__)

# Connection URL is resolved once at import time — set SKMEMORY_SKGRAPH_URL
# before importing this module to override the default local instance.
DEFAULT_URL = os.environ.get("SKMEMORY_SKGRAPH_URL", "redis://localhost:6379")
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
class SKGraphBackend:
    """SKGraph — graph backend for memory relationship indexing and traversal.

    Powered by FalkorDB. Not a full ``BaseBackend`` — this is a supplementary
    graph index. The primary backend (SQLite / file) handles CRUD. This
    backend adds graph edges so you can ask questions like:
    "Which memories are most connected to this session?" or
    "What did Opus plant before this seed?"

    Every public method is best-effort: failures are logged as warnings and
    signalled through the return value (``False`` / ``None`` / ``[]`` /
    ``{"ok": False}``), never raised, so an unreachable graph cannot break
    the primary memory flow.

    Args:
        url: SKGraph connection URL. Reads ``SKMEMORY_SKGRAPH_URL``
            env var by default, falling back to ``redis://localhost:6379``.
        graph_name: Name of the graph (default: ``'skmemory'``).
    """

    # Column order returned by Q.GET_MEMORY_BY_ID; used to build node dicts.
    _NODE_KEYS = (
        "id",
        "title",
        "layer",
        "source",
        "source_ref",
        "intensity",
        "valence",
        "created_at",
        "updated_at",
    )

    def __init__(
        self,
        url: str = DEFAULT_URL,
        graph_name: str = "skmemory",
    ) -> None:
        self.url = url
        self.graph_name = graph_name
        self._db = None  # FalkorDB client, created lazily
        self._graph = None  # selected graph handle, created lazily
        self._initialized = False

    # ─────────────────────────────────────────────────────────
    # Initialisation
    # ─────────────────────────────────────────────────────────

    def _ensure_initialized(self) -> bool:
        """Lazy-initialise the FalkorDB connection.

        Both the import and the connection are deferred to first use, so
        constructing the backend is free and the ``falkordb`` package is
        only required when the graph is actually touched.

        Returns:
            bool: True if the connection is ready, False otherwise.
        """
        if self._initialized:
            return True

        try:
            from falkordb import FalkorDB  # type: ignore[import]
        except ImportError:
            logger.warning("falkordb not installed: pip install skmemory[skgraph]")
            return False

        try:
            self._db = FalkorDB.from_url(self.url)
            self._graph = self._db.select_graph(self.graph_name)
            self._initialized = True
            logger.debug("SKGraph connected: %s / %s", self.url, self.graph_name)
            return True
        except Exception as exc:
            logger.warning("SKGraph connection failed: %s", exc)
            return False

    # ─────────────────────────────────────────────────────────
    # Internal helpers
    # ─────────────────────────────────────────────────────────

    @staticmethod
    def _rows_as_dicts(result, keys: tuple[str, ...]) -> list[dict]:
        """Zip each result row with *keys*, producing one dict per row."""
        return [dict(zip(keys, row)) for row in result.result_set]

    @staticmethod
    def _creator_from_tags(tags) -> str | None:
        """Return the name from the first ``creator:<name>`` tag, if any."""
        for tag in tags:
            if tag.startswith("creator:"):
                return tag.split(":", 1)[1]
        return None

    # ─────────────────────────────────────────────────────────
    # Write operations
    # ─────────────────────────────────────────────────────────

    def save(self, memory: Memory) -> str:
        """Store a memory node with properties in the graph.

        Thin wrapper around :meth:`index_memory` that also returns the
        memory ID, matching the convention used by other backends.
        Indexing failures are logged and swallowed (best-effort); the ID
        is returned regardless.

        Args:
            memory: The Memory object to store as a graph node.

        Returns:
            str: The memory ID (unchanged).
        """
        self.index_memory(memory)
        return memory.id

    def index_memory(self, memory: Memory) -> bool:
        """Add a memory node and all its relationships to the graph.

        Graph edges created:

        * ``(Memory)-[:TAGGED]->(Tag)`` — one per tag
        * ``(Memory)-[:FROM_SOURCE]->(Source)`` — the origin system
        * ``(Memory)-[:RELATED_TO]->(Memory)`` — explicit related_ids,
          plus automatic links to existing memories sharing 2+ tags
        * ``(Memory)-[:PROMOTED_FROM]->(Memory)`` — if parent_id is set
        * ``(Memory)-[:PRECEDED_BY]->(Memory)`` — the previous memory
          from the same source (temporal chain)
        * ``(AI)-[:PLANTED]->(Memory)`` — for seed memories carrying a
          ``creator:<name>`` tag

        Args:
            memory: The memory to index.

        Returns:
            bool: True if indexed successfully, False on failure. The
                individual queries are not transactional, so a failure
                part-way through may leave a partially indexed node.
        """
        if not self._ensure_initialized():
            return False

        try:
            # Upsert the core Memory node with its scalar properties.
            self._graph.query(
                Q.UPSERT_MEMORY,
                {
                    "id": memory.id,
                    "title": memory.title,
                    "layer": memory.layer.value,
                    "source": memory.source,
                    "source_ref": memory.source_ref,
                    "intensity": memory.emotional.intensity,
                    "valence": memory.emotional.valence,
                    "created_at": memory.created_at,
                    "updated_at": memory.updated_at,
                },
            )

            # Promotion lineage.
            if memory.parent_id:
                self._graph.query(
                    Q.CREATE_PROMOTED_FROM,
                    {"child_id": memory.id, "parent_id": memory.parent_id},
                )

            # Explicit relationships.
            for related_id in memory.related_ids:
                self._graph.query(
                    Q.CREATE_RELATED_TO,
                    {"a_id": memory.id, "b_id": related_id},
                )

            # One TAGGED edge per tag.
            for tag in memory.tags:
                self._graph.query(
                    Q.CREATE_TAGGED,
                    {"mem_id": memory.id, "tag": tag},
                )

            # Auto-wire neighbours that share at least two tags.
            self._graph.query(Q.CREATE_SHARED_TAG_RELATED, {"a_id": memory.id})

            # Origin system.
            self._graph.query(
                Q.CREATE_FROM_SOURCE,
                {"mem_id": memory.id, "source": memory.source},
            )

            # Temporal chain: link to the most recent prior memory from
            # the same source so sessions form a PRECEDED_BY chain.
            prev_result = self._graph.query(
                Q.FIND_PREVIOUS_FROM_SOURCE,
                {"source": memory.source, "exclude_id": memory.id},
            )
            if prev_result.result_set:
                prev_id = prev_result.result_set[0][0]
                self._graph.query(
                    Q.CREATE_PRECEDED_BY,
                    {"later_id": memory.id, "earlier_id": prev_id},
                )

            # AI authorship edge for seed memories.
            if memory.source == "seed":
                creator = self._creator_from_tags(memory.tags)
                if creator:
                    self._graph.query(
                        Q.CREATE_PLANTED,
                        {"mem_id": memory.id, "creator": creator},
                    )

            return True
        except Exception as exc:
            logger.warning("SKGraph index failed: %s", exc)
            return False

    # ─────────────────────────────────────────────────────────
    # Read operations
    # ─────────────────────────────────────────────────────────

    def get(self, memory_id: str) -> dict | None:
        """Retrieve the graph node properties for a memory by ID.

        Returns only the properties stored in the graph (no full content).
        For the full Memory object use the primary backend.

        Args:
            memory_id: The memory's unique identifier.

        Returns:
            Optional[dict]: Node properties if found, None otherwise
                (including on connection/query failure).
        """
        if not self._ensure_initialized():
            return None

        try:
            result = self._graph.query(Q.GET_MEMORY_BY_ID, {"id": memory_id})
            if not result.result_set:
                return None
            return dict(zip(self._NODE_KEYS, result.result_set[0]))
        except Exception as exc:
            logger.warning("SKGraph get failed: %s", exc)
            return None

    def search(self, query: str, limit: int = 10) -> list[dict]:
        """Full-text search on memory titles stored in the graph.

        Performs a case-insensitive substring match against the ``title``
        property of all Memory nodes. For full-content search use the
        primary backend or the SKVector vector backend.

        Args:
            query: Search string (case-insensitive substring match).
            limit: Maximum number of results to return.

        Returns:
            list[dict]: Matching memory node stubs, sorted by
                emotional intensity descending.
        """
        if not self._ensure_initialized():
            return []

        try:
            result = self._graph.query(
                Q.SEARCH_BY_TITLE,
                {"query": query, "limit": limit},
            )
            return self._rows_as_dicts(
                result, ("id", "title", "layer", "intensity", "created_at")
            )
        except Exception as exc:
            logger.warning("SKGraph search failed: %s", exc)
            return []

    def search_by_tags(self, tags: list[str], limit: int = 20) -> list[dict]:
        """Find memories sharing any of the given tags via graph edges.

        Args:
            tags: Tag names to search for (OR logic — any match).
            limit: Maximum results.

        Returns:
            list[dict]: Matching memory nodes with tag overlap count.
        """
        if not self._ensure_initialized():
            return []

        if not tags:
            return []

        try:
            result = self._graph.query(
                Q.SEARCH_BY_TAGS,
                {"tags": tags, "limit": limit},
            )
            return self._rows_as_dicts(
                result,
                ("id", "title", "layer", "intensity", "matched_tags", "tag_overlap"),
            )
        except Exception as exc:
            logger.warning("SKGraph tag search failed: %s", exc)
            return []

    def delete(self, memory_id: str) -> bool:
        """Remove a memory node and all its edges from the graph.

        Alias for :meth:`remove_memory`. The underlying query uses
        ``DETACH DELETE``, so all incident edges are removed atomically
        with the node.

        Args:
            memory_id: The memory ID to delete.

        Returns:
            bool: True if the deletion query ran successfully.
        """
        return self.remove_memory(memory_id)

    def remove_memory(self, memory_id: str) -> bool:
        """Remove a memory node and all its relationships from the graph.

        Args:
            memory_id: The memory ID to remove.

        Returns:
            bool: True if removed successfully.
        """
        if not self._ensure_initialized():
            return False

        try:
            self._graph.query(Q.DELETE_MEMORY, {"id": memory_id})
            return True
        except Exception as exc:
            logger.warning("SKGraph remove failed: %s", exc)
            return False

    # ─────────────────────────────────────────────────────────
    # Graph traversal
    # ─────────────────────────────────────────────────────────

    def traverse(self, memory_id: str, depth: int = 2) -> list[dict]:
        """Traverse the graph to find memories connected to a starting node.

        Follows any edge type up to ``depth`` hops from the starting
        memory. Results are sorted by hop distance (closest first) then
        by emotional intensity descending.

        Args:
            memory_id: Starting memory ID.
            depth: Maximum traversal depth (1–5, clamped).

        Returns:
            list[dict]: Connected memory stubs with ``id``, ``title``,
                ``layer``, ``intensity``, and ``distance`` (hop count).
        """
        return self.get_related(memory_id, depth=depth)

    def get_related(self, memory_id: str, depth: int = 2) -> list[dict]:
        """Traverse the graph to find related memories by hop distance.

        Args:
            memory_id: Starting memory ID.
            depth: How many hops to traverse (1–5, clamped).

        Returns:
            list[dict]: Related memory nodes with relationship info.
        """
        if not self._ensure_initialized():
            return []

        # Cypher cannot parameterize variable-length path bounds, so the
        # depth is interpolated into the query text — clamped to 1–5 to
        # bound both query cost and the injection surface.
        safe_depth = max(1, min(depth, 5))
        try:
            result = self._graph.query(
                Q.TRAVERSE_RELATED.format(depth=safe_depth),
                {"id": memory_id},
            )
            return self._rows_as_dicts(
                result, ("id", "title", "layer", "intensity", "distance")
            )
        except Exception as exc:
            logger.warning("SKGraph traversal failed: %s", exc)
            return []

    def get_lineage(self, memory_id: str) -> list[dict]:
        """Get the promotion / seed lineage chain for a memory.

        Walks ``PROMOTED_FROM`` edges upward to recover the full
        ancestry of a promoted memory.

        Args:
            memory_id: Starting memory ID.

        Returns:
            list[dict]: Chain of ancestor memories with ``depth`` field.
        """
        if not self._ensure_initialized():
            return []

        try:
            result = self._graph.query(Q.TRAVERSE_LINEAGE, {"id": memory_id})
            return self._rows_as_dicts(result, ("id", "title", "layer", "depth"))
        except Exception as exc:
            logger.warning("SKGraph lineage query failed: %s", exc)
            return []

    # ─────────────────────────────────────────────────────────
    # Cluster discovery
    # ─────────────────────────────────────────────────────────

    def find_clusters(self, min_size: int = 3) -> list[dict]:
        """Find memory clusters by discovering highly connected hub nodes.

        A cluster is defined as a Memory node with at least ``min_size``
        direct neighbours (any edge type). Returns each hub with the
        count of its connections so callers can rank by centrality.

        Args:
            min_size: Minimum number of direct neighbours for a node to
                be considered a cluster hub (default: 3).

        Returns:
            list[dict]: Cluster hubs with ``id``, ``title``, ``layer``,
                and ``connections`` count, ordered by connections desc.
        """
        return self.get_memory_clusters(min_connections=min_size)

    def get_memory_clusters(self, min_connections: int = 2) -> list[dict]:
        """Find clusters of highly connected memories.

        Args:
            min_connections: Minimum edges to be considered a cluster centre.

        Returns:
            list[dict]: Cluster centres with connection counts.
        """
        if not self._ensure_initialized():
            return []

        try:
            result = self._graph.query(
                Q.FIND_CLUSTER_HUBS,
                {"min_connections": min_connections},
            )
            return self._rows_as_dicts(
                result, ("id", "title", "layer", "connections")
            )
        except Exception as exc:
            logger.warning("SKGraph cluster query failed: %s", exc)
            return []

    # ─────────────────────────────────────────────────────────
    # Introspection
    # ─────────────────────────────────────────────────────────

    def stats(self) -> dict:
        """Return graph statistics: node count, edge count, tag distribution.

        Returns:
            dict: Statistics with keys ``node_count``, ``edge_count``,
                ``memory_count``, ``tag_distribution`` (list of
                ``{tag, memory_count}`` dicts), and ``ok`` bool.
        """
        if not self._ensure_initialized():
            return {"ok": False, "error": "Not initialized"}

        try:
            node_result = self._graph.query(Q.COUNT_NODES)
            node_count = node_result.result_set[0][0] if node_result.result_set else 0

            edge_result = self._graph.query(Q.COUNT_EDGES)
            edge_count = edge_result.result_set[0][0] if edge_result.result_set else 0

            mem_result = self._graph.query(Q.COUNT_MEMORIES)
            memory_count = mem_result.result_set[0][0] if mem_result.result_set else 0

            # Guard against an empty/absent result set, consistent with
            # the count queries above.
            tag_result = self._graph.query(Q.TAG_DISTRIBUTION)
            tag_distribution = [
                {"tag": row[0], "memory_count": row[1]}
                for row in (tag_result.result_set or [])
            ]

            return {
                "ok": True,
                "node_count": node_count,
                "edge_count": edge_count,
                "memory_count": memory_count,
                "tag_distribution": tag_distribution,
            }
        except Exception as exc:
            logger.warning("SKGraph stats failed: %s", exc)
            return {"ok": False, "error": str(exc)}

    def health_check(self) -> dict:
        """Check FalkorDB backend connectivity and graph size.

        Returns:
            dict: Status with ``ok``, ``backend``, ``url``, ``graph``,
                and ``node_count``. On failure returns ``ok: False``
                with an ``error`` key.
        """
        if not self._ensure_initialized():
            return {
                "ok": False,
                "backend": "SKGraphBackend",
                "error": "Not initialized",
            }

        try:
            result = self._graph.query(Q.COUNT_NODES)
            node_count = result.result_set[0][0] if result.result_set else 0
            return {
                "ok": True,
                "backend": "SKGraphBackend",
                "url": self.url,
                "graph": self.graph_name,
                "node_count": node_count,
            }
        except Exception as exc:
            return {
                "ok": False,
                "backend": "SKGraphBackend",
                "error": str(exc),
            }