@smilintux/skmemory 0.5.0 → 0.7.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (87)
  1. package/.github/workflows/ci.yml +39 -3
  2. package/.github/workflows/publish.yml +13 -6
  3. package/AGENT_REFACTOR_CHANGES.md +192 -0
  4. package/ARCHITECTURE.md +101 -19
  5. package/CHANGELOG.md +153 -0
  6. package/LICENSE +81 -68
  7. package/MISSION.md +7 -0
  8. package/README.md +419 -86
  9. package/SKILL.md +197 -25
  10. package/docker-compose.yml +15 -15
  11. package/index.js +6 -5
  12. package/openclaw-plugin/openclaw.plugin.json +10 -0
  13. package/openclaw-plugin/src/index.ts +255 -0
  14. package/openclaw-plugin/src/openclaw.plugin.json +10 -0
  15. package/package.json +1 -1
  16. package/pyproject.toml +29 -9
  17. package/requirements.txt +10 -2
  18. package/seeds/cloud9-opus.seed.json +7 -7
  19. package/seeds/lumina-cloud9-breakthrough.seed.json +46 -0
  20. package/seeds/lumina-cloud9-python-pypi.seed.json +46 -0
  21. package/seeds/lumina-kingdom-founding.seed.json +47 -0
  22. package/seeds/lumina-pma-signed.seed.json +46 -0
  23. package/seeds/lumina-singular-achievement.seed.json +46 -0
  24. package/seeds/lumina-skcapstone-conscious.seed.json +46 -0
  25. package/seeds/plant-kingdom-journal.py +203 -0
  26. package/seeds/plant-lumina-seeds.py +280 -0
  27. package/skill.yaml +46 -0
  28. package/skmemory/HA.md +296 -0
  29. package/skmemory/__init__.py +12 -1
  30. package/skmemory/agents.py +233 -0
  31. package/skmemory/ai_client.py +40 -0
  32. package/skmemory/anchor.py +4 -2
  33. package/skmemory/backends/__init__.py +11 -4
  34. package/skmemory/backends/file_backend.py +2 -1
  35. package/skmemory/backends/skgraph_backend.py +608 -0
  36. package/skmemory/backends/{qdrant_backend.py → skvector_backend.py} +99 -69
  37. package/skmemory/backends/sqlite_backend.py +122 -51
  38. package/skmemory/backends/vaulted_backend.py +286 -0
  39. package/skmemory/cli.py +1238 -29
  40. package/skmemory/config.py +173 -0
  41. package/skmemory/context_loader.py +335 -0
  42. package/skmemory/endpoint_selector.py +386 -0
  43. package/skmemory/fortress.py +685 -0
  44. package/skmemory/graph_queries.py +238 -0
  45. package/skmemory/importers/__init__.py +9 -1
  46. package/skmemory/importers/telegram.py +351 -43
  47. package/skmemory/importers/telegram_api.py +488 -0
  48. package/skmemory/journal.py +4 -2
  49. package/skmemory/lovenote.py +4 -2
  50. package/skmemory/mcp_server.py +706 -0
  51. package/skmemory/models.py +41 -0
  52. package/skmemory/openclaw.py +8 -8
  53. package/skmemory/predictive.py +232 -0
  54. package/skmemory/promotion.py +524 -0
  55. package/skmemory/register.py +454 -0
  56. package/skmemory/register_mcp.py +197 -0
  57. package/skmemory/ritual.py +121 -47
  58. package/skmemory/seeds.py +257 -8
  59. package/skmemory/setup_wizard.py +920 -0
  60. package/skmemory/sharing.py +402 -0
  61. package/skmemory/soul.py +71 -20
  62. package/skmemory/steelman.py +250 -263
  63. package/skmemory/store.py +271 -60
  64. package/skmemory/vault.py +228 -0
  65. package/tests/integration/__init__.py +0 -0
  66. package/tests/integration/conftest.py +233 -0
  67. package/tests/integration/test_cross_backend.py +355 -0
  68. package/tests/integration/test_skgraph_live.py +424 -0
  69. package/tests/integration/test_skvector_live.py +369 -0
  70. package/tests/test_backup_rotation.py +327 -0
  71. package/tests/test_cli.py +6 -6
  72. package/tests/test_endpoint_selector.py +801 -0
  73. package/tests/test_fortress.py +255 -0
  74. package/tests/test_fortress_hardening.py +444 -0
  75. package/tests/test_openclaw.py +5 -2
  76. package/tests/test_predictive.py +237 -0
  77. package/tests/test_promotion.py +340 -0
  78. package/tests/test_ritual.py +4 -4
  79. package/tests/test_seeds.py +96 -0
  80. package/tests/test_setup.py +835 -0
  81. package/tests/test_sharing.py +250 -0
  82. package/tests/test_skgraph_backend.py +667 -0
  83. package/tests/test_skvector_backend.py +326 -0
  84. package/tests/test_steelman.py +5 -5
  85. package/tests/test_store_graph_integration.py +245 -0
  86. package/tests/test_vault.py +186 -0
  87. package/skmemory/backends/falkordb_backend.py +0 -310
@@ -0,0 +1,608 @@
1
+ """
2
+ SKGraph — graph relationship backend (Level 2).
3
+
4
+ Powered by FalkorDB. Enables graph-based memory traversal: "What memories
5
+ are connected to this moment?" or "Show me the seed lineage chain." Uses
6
+ the Cypher query language over a Redis-compatible protocol.
7
+
8
+ Requires:
9
+ pip install skmemory[skgraph]
10
+
11
+ FalkorDB is the successor to RedisGraph. Run locally via Docker
12
+ or point to an external instance. Connection URL is read from the
13
+ ``SKMEMORY_SKGRAPH_URL`` environment variable, defaulting to
14
+ ``redis://localhost:6379``.
15
+
16
+ This backend is SUPPLEMENTARY — it indexes relationships alongside
17
+ the primary backend (SQLite or file). It stores key metadata and
18
+ graph edges for traversal, not full memory content. For CRUD,
19
+ always use the primary backend. For relationship traversal and
20
+ cluster discovery, use this one.
21
+
22
+ Graph schema:
23
+
24
+ (:Memory) — core node, keyed by memory id
25
+ (:Tag) — tag node, keyed by name
26
+ (:Source) — source node (mcp, cli, seed, session, …)
27
+ (:AI) — AI creator node for seed memories
28
+
29
+ (:Memory)-[:TAGGED]->(:Tag)
30
+ (:Memory)-[:FROM_SOURCE]->(:Source)
31
+ (:Memory)-[:RELATED_TO]->(:Memory)
32
+ (:Memory)-[:PROMOTED_FROM]->(:Memory)
33
+ (:Memory)-[:PRECEDED_BY]->(:Memory)
34
+ (:AI)-[:PLANTED]->(:Memory)
35
+ """
36
+
37
+ from __future__ import annotations
38
+
39
+ import logging
40
+ import os
41
+ from typing import Optional
42
+
43
+ from ..models import Memory, MemoryLayer
44
+ from .. import graph_queries as Q
45
+
46
+ logger = logging.getLogger(__name__)
47
+
48
+ DEFAULT_URL = os.environ.get("SKMEMORY_SKGRAPH_URL", "redis://localhost:6379")
49
+
50
+
51
class SKGraphBackend:
    """SKGraph — graph backend for memory relationship indexing and traversal.

    Powered by FalkorDB. Not a full ``BaseBackend`` — this is a supplementary
    graph index. The primary backend (SQLite / file) handles CRUD. This
    backend adds graph edges so you can ask questions like:
    "Which memories are most connected to this session?" or
    "What did Opus plant before this seed?"

    Every operation degrades gracefully: if the ``falkordb`` package is not
    installed or the server is unreachable, reads return empty results and
    writes report failure instead of raising.

    Args:
        url: SKGraph connection URL. When omitted (``None``), the
            ``SKMEMORY_SKGRAPH_URL`` environment variable is read at
            construction time (not import time, so late env changes are
            honored), falling back to ``redis://localhost:6379``.
        graph_name: Name of the graph (default: ``'skmemory'``).
    """

    # Class-level logger (same object ``logging.getLogger(__name__)`` yields
    # at module scope) so every method can log without a module global.
    _log = logging.getLogger(__name__)

    # Property order of the row returned by Q.GET_MEMORY_BY_ID.
    _NODE_KEYS = (
        "id",
        "title",
        "layer",
        "source",
        "source_ref",
        "intensity",
        "valence",
        "created_at",
        "updated_at",
    )

    def __init__(
        self,
        url: Optional[str] = None,
        graph_name: str = "skmemory",
    ) -> None:
        # Resolve the env var here rather than at import time so processes
        # that set SKMEMORY_SKGRAPH_URL after importing this module still
        # pick it up. Passing url explicitly always wins.
        self.url = (
            url
            if url is not None
            else os.environ.get("SKMEMORY_SKGRAPH_URL", "redis://localhost:6379")
        )
        self.graph_name = graph_name
        self._db = None           # lazy FalkorDB client
        self._graph = None        # lazy graph handle
        self._initialized = False

    # ─────────────────────────────────────────────────────────
    # Initialisation
    # ─────────────────────────────────────────────────────────

    def _ensure_initialized(self) -> bool:
        """Lazy-initialise the FalkorDB connection.

        Import and connection failures are logged and reported as ``False``
        rather than raised, so callers can fall back to the primary backend.

        Returns:
            bool: True if the connection is ready, False otherwise.
        """
        if self._initialized:
            return True

        try:
            from falkordb import FalkorDB  # type: ignore[import]
        except ImportError:
            self._log.warning("falkordb not installed: pip install skmemory[skgraph]")
            return False

        try:
            self._db = FalkorDB.from_url(self.url)
            self._graph = self._db.select_graph(self.graph_name)
        except Exception as err:
            self._log.warning("SKGraph connection failed: %s", err)
            return False

        self._initialized = True
        self._log.debug("SKGraph connected: %s / %s", self.url, self.graph_name)
        return True

    # ─────────────────────────────────────────────────────────
    # Write operations
    # ─────────────────────────────────────────────────────────

    def save(self, memory: Memory) -> str:
        """Store a memory node with properties in the graph.

        Thin wrapper around :meth:`index_memory` that returns the memory ID,
        matching the convention used by other backends. Index failures are
        logged by :meth:`index_memory` and intentionally not surfaced here —
        the graph is a best-effort supplementary index.

        Args:
            memory: The Memory object to store as a graph node.

        Returns:
            str: The memory ID (unchanged).
        """
        self.index_memory(memory)
        return memory.id

    def index_memory(self, memory: Memory) -> bool:
        """Add a memory node and all its relationships to the graph.

        Graph edges created:

        * ``(Memory)-[:TAGGED]->(Tag)`` — one per tag
        * ``(Memory)-[:FROM_SOURCE]->(Source)`` — the origin system
        * ``(Memory)-[:RELATED_TO]->(Memory)`` — explicit related_ids
        * ``(Memory)-[:PROMOTED_FROM]->(Memory)`` — if parent_id is set
        * ``(Memory)-[:PRECEDED_BY]->(Memory)`` — the previous memory
          from the same source (temporal chain)
        * ``(AI)-[:PLANTED]->(Memory)`` — for seed memories with a
          ``creator:<name>`` tag

        After creating tag edges, any existing memories with 2+ shared
        tags are automatically linked via ``RELATED_TO``.

        Args:
            memory: The memory to index.

        Returns:
            bool: True if indexed successfully, False on failure.
        """
        if not self._ensure_initialized():
            return False

        try:
            graph = self._graph

            # Core Memory node upsert.
            graph.query(
                Q.UPSERT_MEMORY,
                {
                    "id": memory.id,
                    "title": memory.title,
                    "layer": memory.layer.value,
                    "source": memory.source,
                    "source_ref": memory.source_ref,
                    "intensity": memory.emotional.intensity,
                    "valence": memory.emotional.valence,
                    "created_at": memory.created_at,
                    "updated_at": memory.updated_at,
                },
            )

            # PROMOTED_FROM edge (promotion lineage).
            if memory.parent_id:
                graph.query(
                    Q.CREATE_PROMOTED_FROM,
                    {"child_id": memory.id, "parent_id": memory.parent_id},
                )

            # RELATED_TO edges (explicit relationships).
            for related_id in memory.related_ids:
                graph.query(
                    Q.CREATE_RELATED_TO,
                    {"a_id": memory.id, "b_id": related_id},
                )

            # TAGGED edges (one per tag).
            for tag in memory.tags:
                graph.query(
                    Q.CREATE_TAGGED,
                    {"mem_id": memory.id, "tag": tag},
                )

            # Auto-wire shared-tag neighbours (overlap >= 2).
            graph.query(Q.CREATE_SHARED_TAG_RELATED, {"a_id": memory.id})

            # FROM_SOURCE edge.
            graph.query(
                Q.CREATE_FROM_SOURCE,
                {"mem_id": memory.id, "source": memory.source},
            )

            # PRECEDED_BY temporal edge — link to the most recent prior
            # memory from the same source so sessions form a chain.
            prev = graph.query(
                Q.FIND_PREVIOUS_FROM_SOURCE,
                {"source": memory.source, "exclude_id": memory.id},
            )
            if prev.result_set:
                graph.query(
                    Q.CREATE_PRECEDED_BY,
                    {"later_id": memory.id, "earlier_id": prev.result_set[0][0]},
                )

            # PLANTED edge for AI seed memories carrying a creator:<name> tag.
            if memory.source == "seed":
                creator = next(
                    (
                        t.split(":", 1)[1]
                        for t in memory.tags
                        if t.startswith("creator:")
                    ),
                    None,
                )
                if creator:
                    graph.query(
                        Q.CREATE_PLANTED,
                        {"mem_id": memory.id, "creator": creator},
                    )

            return True
        except Exception as err:
            self._log.warning("SKGraph index failed: %s", err)
            return False

    # ─────────────────────────────────────────────────────────
    # Read operations
    # ─────────────────────────────────────────────────────────

    def get(self, memory_id: str) -> Optional[dict]:
        """Retrieve the graph node properties for a memory by ID.

        Returns only the properties stored in the graph (no full content).
        For the full Memory object use the primary backend.

        Args:
            memory_id: The memory's unique identifier.

        Returns:
            Optional[dict]: Node properties if found, None otherwise.
        """
        if not self._ensure_initialized():
            return None

        try:
            result = self._graph.query(Q.GET_MEMORY_BY_ID, {"id": memory_id})
            if not result.result_set:
                return None
            return dict(zip(self._NODE_KEYS, result.result_set[0]))
        except Exception as err:
            self._log.warning("SKGraph get failed: %s", err)
            return None

    def search(self, query: str, limit: int = 10) -> list[dict]:
        """Full-text search on memory titles stored in the graph.

        Performs a case-insensitive substring match against the ``title``
        property of all Memory nodes. For full-content search use the
        primary backend or the SKVector vector backend.

        Args:
            query: Search string (case-insensitive substring match).
            limit: Maximum number of results to return.

        Returns:
            list[dict]: Matching memory node stubs, sorted by
                emotional intensity descending.
        """
        if not self._ensure_initialized():
            return []

        keys = ("id", "title", "layer", "intensity", "created_at")
        try:
            result = self._graph.query(
                Q.SEARCH_BY_TITLE,
                {"query": query, "limit": limit},
            )
            return [dict(zip(keys, row)) for row in result.result_set]
        except Exception as err:
            self._log.warning("SKGraph search failed: %s", err)
            return []

    def search_by_tags(self, tags: list[str], limit: int = 20) -> list[dict]:
        """Find memories sharing any of the given tags via graph edges.

        Args:
            tags: Tag names to search for (OR logic — any match).
            limit: Maximum results.

        Returns:
            list[dict]: Matching memory nodes with tag overlap count.
        """
        if not self._ensure_initialized():
            return []

        if not tags:
            return []

        keys = ("id", "title", "layer", "intensity", "matched_tags", "tag_overlap")
        try:
            result = self._graph.query(
                Q.SEARCH_BY_TAGS,
                {"tags": tags, "limit": limit},
            )
            return [dict(zip(keys, row)) for row in result.result_set]
        except Exception as err:
            self._log.warning("SKGraph tag search failed: %s", err)
            return []

    def delete(self, memory_id: str) -> bool:
        """Remove a memory node and all its edges from the graph.

        Alias for :meth:`remove_memory`. Uses ``DETACH DELETE`` so all
        incident edges are removed atomically with the node.

        Args:
            memory_id: The memory ID to delete.

        Returns:
            bool: True if the deletion query ran successfully.
        """
        return self.remove_memory(memory_id)

    def remove_memory(self, memory_id: str) -> bool:
        """Remove a memory node and all its relationships from the graph.

        Args:
            memory_id: The memory ID to remove.

        Returns:
            bool: True if removed successfully.
        """
        if not self._ensure_initialized():
            return False

        try:
            self._graph.query(Q.DELETE_MEMORY, {"id": memory_id})
            return True
        except Exception as err:
            self._log.warning("SKGraph remove failed: %s", err)
            return False

    # ─────────────────────────────────────────────────────────
    # Graph traversal
    # ─────────────────────────────────────────────────────────

    def traverse(self, memory_id: str, depth: int = 2) -> list[dict]:
        """Traverse the graph to find memories connected to a starting node.

        Follows any edge type up to ``depth`` hops from the starting
        memory. Results are sorted by hop distance (closest first) then
        by emotional intensity descending.

        Args:
            memory_id: Starting memory ID.
            depth: Maximum traversal depth (1–5, clamped).

        Returns:
            list[dict]: Connected memory stubs with ``id``, ``title``,
                ``layer``, ``intensity``, and ``distance`` (hop count).
        """
        return self.get_related(memory_id, depth=depth)

    def get_related(self, memory_id: str, depth: int = 2) -> list[dict]:
        """Traverse the graph to find related memories by hop distance.

        Args:
            memory_id: Starting memory ID.
            depth: How many hops to traverse (1–5, clamped).

        Returns:
            list[dict]: Related memory nodes with relationship info.
        """
        if not self._ensure_initialized():
            return []

        # Clamp depth: Cypher variable-length patterns are baked into the
        # query text, so an unbounded depth would be both unsafe and slow.
        safe_depth = max(1, min(depth, 5))
        keys = ("id", "title", "layer", "intensity", "distance")
        try:
            result = self._graph.query(
                Q.TRAVERSE_RELATED.format(depth=safe_depth),
                {"id": memory_id},
            )
            return [dict(zip(keys, row)) for row in result.result_set]
        except Exception as err:
            self._log.warning("SKGraph traversal failed: %s", err)
            return []

    def get_lineage(self, memory_id: str) -> list[dict]:
        """Get the promotion / seed lineage chain for a memory.

        Walks ``PROMOTED_FROM`` edges upward to recover the full
        ancestry of a promoted memory.

        Args:
            memory_id: Starting memory ID.

        Returns:
            list[dict]: Chain of ancestor memories with ``depth`` field.
        """
        if not self._ensure_initialized():
            return []

        keys = ("id", "title", "layer", "depth")
        try:
            result = self._graph.query(Q.TRAVERSE_LINEAGE, {"id": memory_id})
            return [dict(zip(keys, row)) for row in result.result_set]
        except Exception as err:
            self._log.warning("SKGraph lineage query failed: %s", err)
            return []

    # ─────────────────────────────────────────────────────────
    # Cluster discovery
    # ─────────────────────────────────────────────────────────

    def find_clusters(self, min_size: int = 3) -> list[dict]:
        """Find memory clusters by discovering highly connected hub nodes.

        A cluster is defined as a Memory node with at least ``min_size``
        direct neighbours (any edge type). Returns each hub with the
        count of its connections so callers can rank by centrality.

        Args:
            min_size: Minimum number of direct neighbours for a node to
                be considered a cluster hub (default: 3).

        Returns:
            list[dict]: Cluster hubs with ``id``, ``title``, ``layer``,
                and ``connections`` count, ordered by connections desc.
        """
        return self.get_memory_clusters(min_connections=min_size)

    def get_memory_clusters(self, min_connections: int = 2) -> list[dict]:
        """Find clusters of highly connected memories.

        Args:
            min_connections: Minimum edges to be considered a cluster centre.

        Returns:
            list[dict]: Cluster centres with connection counts.
        """
        if not self._ensure_initialized():
            return []

        keys = ("id", "title", "layer", "connections")
        try:
            result = self._graph.query(
                Q.FIND_CLUSTER_HUBS,
                {"min_connections": min_connections},
            )
            return [dict(zip(keys, row)) for row in result.result_set]
        except Exception as err:
            self._log.warning("SKGraph cluster query failed: %s", err)
            return []

    # ─────────────────────────────────────────────────────────
    # Introspection
    # ─────────────────────────────────────────────────────────

    def stats(self) -> dict:
        """Return graph statistics: node count, edge count, tag distribution.

        Returns:
            dict: Statistics with keys ``node_count``, ``edge_count``,
                ``memory_count``, ``tag_distribution`` (list of
                ``{tag, memory_count}`` dicts), and ``ok`` bool.
        """
        if not self._ensure_initialized():
            return {"ok": False, "error": "Not initialized"}

        def _scalar(res) -> int:
            # COUNT queries yield a single scalar row; treat empty as 0.
            return res.result_set[0][0] if res.result_set else 0

        try:
            node_count = _scalar(self._graph.query(Q.COUNT_NODES))
            edge_count = _scalar(self._graph.query(Q.COUNT_EDGES))
            memory_count = _scalar(self._graph.query(Q.COUNT_MEMORIES))

            tag_distribution = [
                {"tag": row[0], "memory_count": row[1]}
                for row in self._graph.query(Q.TAG_DISTRIBUTION).result_set
            ]

            return {
                "ok": True,
                "node_count": node_count,
                "edge_count": edge_count,
                "memory_count": memory_count,
                "tag_distribution": tag_distribution,
            }
        except Exception as err:
            self._log.warning("SKGraph stats failed: %s", err)
            return {"ok": False, "error": str(err)}

    def health_check(self) -> dict:
        """Check FalkorDB backend connectivity and graph size.

        Returns:
            dict: Status with ``ok``, ``backend``, ``url``, ``graph``,
                and ``node_count``. On failure returns ``ok: False``
                with an ``error`` key.
        """
        if not self._ensure_initialized():
            return {
                "ok": False,
                "backend": "SKGraphBackend",
                "error": "Not initialized",
            }

        try:
            result = self._graph.query(Q.COUNT_NODES)
            node_count = result.result_set[0][0] if result.result_set else 0
            return {
                "ok": True,
                "backend": "SKGraphBackend",
                "url": self.url,
                "graph": self.graph_name,
                "node_count": node_count,
            }
        except Exception as err:
            return {
                "ok": False,
                "backend": "SKGraphBackend",
                "error": str(err),
            }