superlocalmemory 3.4.0 → 3.4.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (33)
  1. package/README.md +7 -8
  2. package/docs/screenshots/01-dashboard-main.png +0 -0
  3. package/docs/screenshots/02-knowledge-graph.png +0 -0
  4. package/docs/screenshots/03-patterns-learning.png +0 -0
  5. package/docs/screenshots/04-learning-dashboard.png +0 -0
  6. package/docs/screenshots/05-behavioral-analysis.png +0 -0
  7. package/docs/screenshots/06-graph-communities.png +0 -0
  8. package/package.json +2 -2
  9. package/pyproject.toml +1 -1
  10. package/src/superlocalmemory/core/engine_wiring.py +5 -1
  11. package/src/superlocalmemory/core/graph_analyzer.py +254 -12
  12. package/src/superlocalmemory/learning/consolidation_worker.py +240 -52
  13. package/src/superlocalmemory/retrieval/entity_channel.py +135 -4
  14. package/src/superlocalmemory/retrieval/spreading_activation.py +45 -0
  15. package/src/superlocalmemory/server/api.py +9 -1
  16. package/src/superlocalmemory/server/routes/behavioral.py +8 -4
  17. package/src/superlocalmemory/server/routes/chat.py +320 -0
  18. package/src/superlocalmemory/server/routes/insights.py +368 -0
  19. package/src/superlocalmemory/server/routes/learning.py +106 -6
  20. package/src/superlocalmemory/server/routes/memories.py +20 -9
  21. package/src/superlocalmemory/server/routes/stats.py +25 -3
  22. package/src/superlocalmemory/server/routes/timeline.py +252 -0
  23. package/src/superlocalmemory/server/routes/v3_api.py +161 -0
  24. package/src/superlocalmemory/server/ui.py +8 -0
  25. package/src/superlocalmemory/ui/index.html +168 -58
  26. package/src/superlocalmemory/ui/js/graph-event-bus.js +83 -0
  27. package/src/superlocalmemory/ui/js/graph-filters.js +1 -1
  28. package/src/superlocalmemory/ui/js/knowledge-graph.js +942 -0
  29. package/src/superlocalmemory/ui/js/memory-chat.js +344 -0
  30. package/src/superlocalmemory/ui/js/memory-timeline.js +265 -0
  31. package/src/superlocalmemory/ui/js/quick-actions.js +334 -0
  32. package/src/superlocalmemory.egg-info/PKG-INFO +17 -14
  33. package/src/superlocalmemory.egg-info/SOURCES.txt +8 -0
package/README.md CHANGED
@@ -341,20 +341,19 @@ slm dashboard # Opens at http://localhost:8765
341
341
 
342
342
  <details open>
343
343
  <summary><strong>Dashboard Screenshots</strong> (click to collapse)</summary>
344
- <p align="center"><img src="docs/screenshots/01-dashboard-main.png" alt="Dashboard" width="600"/></p>
344
+ <p align="center"><img src="docs/screenshots/01-dashboard-main.png" alt="Dashboard Overview — 3,100+ memories, 430K connections" width="600"/></p>
345
345
  <p align="center">
346
- <img src="docs/screenshots/02-knowledge-graph.png" alt="Graph" width="190"/>
347
- <img src="docs/screenshots/03-math-health.png" alt="Math" width="190"/>
348
- <img src="docs/screenshots/05-trust-dashboard.png" alt="Trust" width="190"/>
346
+ <img src="docs/screenshots/02-knowledge-graph.png" alt="Knowledge Graph — Sigma.js WebGL with community detection, chat, quick actions, timeline" width="290"/>
347
+ <img src="docs/screenshots/06-graph-communities.png" alt="Graph Communities — Louvain clustering with colored nodes" width="290"/>
349
348
  </p>
350
349
  <p align="center">
351
- <img src="docs/screenshots/04-recall-lab.png" alt="Recall" width="190"/>
352
- <img src="docs/screenshots/06-settings.png" alt="Settings" width="190"/>
353
- <img src="docs/screenshots/07-memories-blurred.png" alt="Memories" width="190"/>
350
+ <img src="docs/screenshots/03-patterns-learning.png" alt="Patterns — 50 learned behavioral patterns with confidence bars" width="190"/>
351
+ <img src="docs/screenshots/04-learning-dashboard.png" alt="Learning — 722 signals, ML Model phase, tech preferences" width="190"/>
352
+ <img src="docs/screenshots/05-behavioral-analysis.png" alt="Behavioral — pattern analysis with confidence distribution" width="190"/>
354
353
  </p>
355
354
  </details>
356
355
 
357
- 23 tabs: Dashboard, Recall Lab, Knowledge Graph, Memories, Trust Scores, Math Health, Compliance, Learning, IDE Connections, Settings, Memory Lifecycle, Compression, Patterns, and more. Runs locally — no data leaves your machine.
356
+ **v3.4.1 Visual Intelligence:** Sigma.js WebGL knowledge graph with community detection (Louvain/Leiden), 5 quick insight actions, D3 memory timeline, graph-enhanced retrieval (PageRank bias + community boost + contradiction suppression), and 56 auto-mined behavioral patterns. 23+ tabs. Runs locally — no data leaves your machine.
358
357
 
359
358
  ---
360
359
 
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "superlocalmemory",
3
- "version": "3.4.0",
3
+ "version": "3.4.1",
4
4
  "description": "Information-geometric agent memory with mathematical guarantees. 4-channel retrieval, Fisher-Rao similarity, zero-LLM mode, EU AI Act compliant. Works with Claude, Cursor, Windsurf, and 17+ AI tools.",
5
5
  "keywords": [
6
6
  "ai-memory",
@@ -89,4 +89,4 @@
89
89
  "dependencies": {
90
90
  "docx": "^9.5.1"
91
91
  }
92
- }
92
+ }
package/pyproject.toml CHANGED
@@ -1,6 +1,6 @@
1
1
  [project]
2
2
  name = "superlocalmemory"
3
- version = "3.4.0"
3
+ version = "3.4.1"
4
4
  description = "Information-geometric agent memory with mathematical guarantees"
5
5
  readme = "README.md"
6
6
  license = {text = "AGPL-3.0-or-later"}
package/src/superlocalmemory/core/engine_wiring.py CHANGED
@@ -364,7 +364,11 @@ def _init_spreading_activation(
364
364
  SpreadingActivation,
365
365
  SpreadingActivationConfig,
366
366
  )
367
- sa_config = SpreadingActivationConfig(enabled=True)
367
+ sa_config = SpreadingActivationConfig(
368
+ enabled=True,
369
+ use_pagerank_bias=True, # v3.4.1: PageRank-weighted propagation
370
+ community_boost=0.15, # v3.4.1: 15% boost for same-community nodes
371
+ )
368
372
  return SpreadingActivation(
369
373
  db=db, vector_store=vector_store, config=sa_config,
370
374
  )
package/src/superlocalmemory/core/graph_analyzer.py CHANGED
@@ -8,13 +8,19 @@ Reads BOTH graph_edges and association_edges for the full graph picture.
8
8
  Stores results in fact_importance table.
9
9
  Called during consolidation (Phase 5), not at query time.
10
10
 
11
+ v3.4.1: Added Leiden community detection (optional), TF-IDF community labels,
12
+ bridge score detection. Frontend uses Louvain; backend uses Leiden/LP.
13
+
11
14
  Part of Qualixar | Author: Varun Pratap Bhardwaj
12
- License: Elastic-2.0
15
+ License: AGPL-3.0-or-later
13
16
  """
14
17
 
15
18
  from __future__ import annotations
16
19
 
20
+ import json
17
21
  import logging
22
+ from collections import Counter, defaultdict
23
+ from math import log
18
24
  from typing import Any
19
25
 
20
26
  logger = logging.getLogger(__name__)
@@ -37,7 +43,11 @@ class GraphAnalyzer:
37
43
  def compute_and_store(self, profile_id: str) -> dict[str, Any]:
38
44
  """Run all analyses and persist to fact_importance.
39
45
 
40
- Returns summary dict with node_count, community_count, top_5_nodes.
46
+ v3.4.1: Now uses Leiden (falls back to Label Propagation),
47
+ generates TF-IDF community labels, computes bridge scores.
48
+
49
+ Returns summary dict with node_count, community_count, top_5_nodes,
50
+ bridge_count, top_bridge_nodes, community_labels.
41
51
  """
42
52
  try:
43
53
  graph = self._build_networkx_graph(profile_id)
@@ -50,22 +60,64 @@ class GraphAnalyzer:
50
60
  }
51
61
 
52
62
  pagerank = self.compute_pagerank(graph)
53
- communities = self.detect_communities(graph)
63
+ communities = self.detect_communities_leiden(graph, profile_id)
54
64
  centrality = self._compute_degree_centrality(graph)
65
+ bridge_scores = self.compute_bridge_scores(graph)
66
+ labels = self.compute_community_labels(profile_id, communities)
55
67
 
56
- # Persist to fact_importance
68
+ # v3.4.1: Ensure bridge_score column exists (idempotent migration)
69
+ try:
70
+ columns = self._db.execute(
71
+ "PRAGMA table_info(fact_importance)", (),
72
+ )
73
+ has_bridge = any(
74
+ dict(c).get("name") == "bridge_score" for c in columns
75
+ )
76
+ if not has_bridge:
77
+ self._db.execute(
78
+ "ALTER TABLE fact_importance "
79
+ "ADD COLUMN bridge_score REAL DEFAULT 0.0",
80
+ (),
81
+ )
82
+ except Exception:
83
+ pass
84
+
85
+ # Persist to fact_importance (with bridge_score)
57
86
  for node_id in graph.nodes():
58
87
  pr_score = pagerank.get(node_id, 0.0)
59
88
  comm_id = communities.get(node_id)
60
89
  deg_cent = centrality.get(node_id, 0.0)
61
- self._db.execute(
62
- "INSERT OR REPLACE INTO fact_importance "
63
- "(fact_id, profile_id, pagerank_score, community_id, "
64
- " degree_centrality, computed_at) "
65
- "VALUES (?, ?, ?, ?, ?, datetime('now'))",
66
- (node_id, profile_id, round(pr_score, 6),
67
- comm_id, round(deg_cent, 4)),
68
- )
90
+ br_score = bridge_scores.get(node_id, 0.0)
91
+ try:
92
+ self._db.execute(
93
+ "INSERT OR REPLACE INTO fact_importance "
94
+ "(fact_id, profile_id, pagerank_score, community_id, "
95
+ " degree_centrality, bridge_score, computed_at) "
96
+ "VALUES (?, ?, ?, ?, ?, ?, datetime('now'))",
97
+ (node_id, profile_id, round(pr_score, 6),
98
+ comm_id, round(deg_cent, 4),
99
+ round(br_score, 6)),
100
+ )
101
+ except Exception:
102
+ # Fallback without bridge_score if column doesn't exist
103
+ self._db.execute(
104
+ "INSERT OR REPLACE INTO fact_importance "
105
+ "(fact_id, profile_id, pagerank_score, community_id, "
106
+ " degree_centrality, computed_at) "
107
+ "VALUES (?, ?, ?, ?, ?, datetime('now'))",
108
+ (node_id, profile_id, round(pr_score, 6),
109
+ comm_id, round(deg_cent, 4)),
110
+ )
111
+
112
+ # v3.4.1: Persist community labels to JSON sidecar
113
+ try:
114
+ from pathlib import Path as _Path
115
+ labels_dir = _Path.home() / ".superlocalmemory"
116
+ labels_dir.mkdir(parents=True, exist_ok=True)
117
+ labels_path = labels_dir / f"{profile_id}_community_labels.json"
118
+ labels_path.write_text(json.dumps(labels, indent=2))
119
+ except Exception:
120
+ pass
69
121
 
70
122
  top_5 = sorted(
71
123
  pagerank.items(), key=lambda x: x[1], reverse=True,
@@ -74,6 +126,20 @@ class GraphAnalyzer:
74
126
  set(c for c in communities.values() if c is not None),
75
127
  )
76
128
 
129
+ bridge_count = len(
130
+ [s for s in bridge_scores.values() if s > 0.1],
131
+ )
132
+ top_bridges = sorted(
133
+ bridge_scores.items(), key=lambda x: -x[1],
134
+ )[:5]
135
+
136
+ logger.info(
137
+ "GraphAnalyzer: %d nodes, %d communities, %d bridges, "
138
+ "labels=%s",
139
+ graph.number_of_nodes(), unique_communities,
140
+ bridge_count, labels,
141
+ )
142
+
77
143
  return {
78
144
  "node_count": graph.number_of_nodes(),
79
145
  "edge_count": graph.number_of_edges(),
@@ -81,6 +147,11 @@ class GraphAnalyzer:
81
147
  "top_5_nodes": [
82
148
  (nid, round(score, 4)) for nid, score in top_5
83
149
  ],
150
+ "bridge_count": bridge_count,
151
+ "top_bridge_nodes": [
152
+ (nid, round(s, 4)) for nid, s in top_bridges
153
+ ],
154
+ "community_labels": labels,
84
155
  }
85
156
  except Exception as exc:
86
157
  logger.debug("GraphAnalyzer.compute_and_store failed: %s", exc)
@@ -140,6 +211,177 @@ class GraphAnalyzer:
140
211
  result[node] = comm_id
141
212
  return result
142
213
 
214
+ # ── v3.4.1: Leiden Community Detection ────────────────────────
215
+
216
+ def detect_communities_leiden(
217
+ self,
218
+ graph: Any = None,
219
+ profile_id: str = "",
220
+ resolution: float = 1.0,
221
+ ) -> dict[str, int]:
222
+ """Leiden community detection (higher quality than Label Propagation).
223
+
224
+ Falls back to detect_communities() (Label Propagation) if
225
+ leidenalg or igraph are not installed.
226
+ """
227
+ if graph is None:
228
+ graph = self._build_networkx_graph(profile_id)
229
+ if graph.number_of_nodes() == 0:
230
+ return {}
231
+
232
+ try:
233
+ import leidenalg
234
+ import igraph
235
+ except ImportError:
236
+ logger.info(
237
+ "leidenalg not installed, using Label Propagation fallback",
238
+ )
239
+ return self.detect_communities(graph, profile_id)
240
+
241
+ # Convert DiGraph -> undirected -> igraph
242
+ undirected = graph.to_undirected()
243
+ node_list = list(undirected.nodes())
244
+ node_index = {n: i for i, n in enumerate(node_list)}
245
+
246
+ ig = igraph.Graph(n=len(node_list), directed=False)
247
+ edges = []
248
+ weights = []
249
+ for u, v in undirected.edges():
250
+ if u in node_index and v in node_index:
251
+ edges.append((node_index[u], node_index[v]))
252
+ weights.append(undirected[u][v].get("weight", 1.0))
253
+
254
+ ig.add_edges(edges)
255
+ ig.es["weight"] = weights
256
+ ig.simplify(combine_edges={"weight": "max"})
257
+
258
+ partition = leidenalg.find_partition(
259
+ ig,
260
+ leidenalg.RBConfigurationVertexPartition,
261
+ resolution_parameter=resolution,
262
+ weights="weight",
263
+ )
264
+
265
+ result: dict[str, int] = {}
266
+ for idx, comm_id in enumerate(partition.membership):
267
+ result[node_list[idx]] = comm_id
268
+
269
+ logger.info(
270
+ "Leiden detected %d communities (resolution=%.1f)",
271
+ len(set(result.values())), resolution,
272
+ )
273
+ return result
274
+
275
+ # ── v3.4.1: TF-IDF Community Labels ─────────────────────────
276
+
277
+ def compute_community_labels(
278
+ self,
279
+ profile_id: str,
280
+ communities: dict[str, int],
281
+ ) -> dict[int, str]:
282
+ """Generate human-readable labels via TF-IDF on fact content.
283
+
284
+ Returns dict mapping community_id to label string.
285
+ Labels stored in config table for API access.
286
+ """
287
+ if not communities:
288
+ return {}
289
+
290
+ # Group fact_ids by community
291
+ comm_facts: dict[int, list[str]] = defaultdict(list)
292
+ for fact_id, comm_id in communities.items():
293
+ comm_facts[comm_id].append(fact_id)
294
+
295
+ stopwords = frozenset({
296
+ "the", "a", "an", "is", "was", "were", "are", "be", "been",
297
+ "being", "have", "has", "had", "do", "does", "did", "will",
298
+ "would", "could", "should", "may", "might", "shall", "can",
299
+ "to", "of", "in", "for", "on", "with", "at", "by", "from",
300
+ "as", "into", "through", "during", "before", "after", "above",
301
+ "below", "between", "and", "but", "or", "not", "no", "nor",
302
+ "so", "yet", "both", "either", "neither", "this", "that",
303
+ "these", "those", "it", "its", "they", "them", "their",
304
+ "he", "she", "his", "her", "we", "our", "you", "your",
305
+ "i", "my", "me",
306
+ })
307
+
308
+ # Fetch content for each community
309
+ tf_per_comm: dict[int, Counter] = {}
310
+ for comm_id, fact_ids in comm_facts.items():
311
+ placeholders = ",".join("?" * len(fact_ids))
312
+ sql = (
313
+ "SELECT content FROM atomic_facts WHERE fact_id IN ("
314
+ + placeholders
315
+ + ") AND profile_id = ?"
316
+ )
317
+ try:
318
+ rows = self._db.execute(sql, (*fact_ids, profile_id))
319
+ texts = [dict(r).get("content", "") for r in rows]
320
+ except Exception:
321
+ texts = []
322
+
323
+ tokens: list[str] = []
324
+ for text in texts:
325
+ for word in text.lower().split():
326
+ w = word.strip(".,;:!?\"'()[]{}")
327
+ if len(w) > 2 and w not in stopwords:
328
+ tokens.append(w)
329
+ tf_per_comm[comm_id] = Counter(tokens)
330
+
331
+ num_communities = len(comm_facts)
332
+ labels: dict[int, str] = {}
333
+
334
+ if num_communities == 1:
335
+ # Single community: use raw term frequency
336
+ for comm_id, tf in tf_per_comm.items():
337
+ top = [w for w, _ in tf.most_common(3)]
338
+ labels[comm_id] = ", ".join(top) if top else f"Community {comm_id}"
339
+ else:
340
+ # Compute IDF across communities
341
+ doc_freq: Counter = Counter()
342
+ for tf in tf_per_comm.values():
343
+ for term in tf:
344
+ doc_freq[term] += 1
345
+
346
+ for comm_id, tf in tf_per_comm.items():
347
+ scored = []
348
+ for term, count in tf.items():
349
+ idf = log(1 + num_communities / (1 + doc_freq[term]))
350
+ scored.append((term, count * idf))
351
+ scored.sort(key=lambda x: x[1], reverse=True)
352
+ top = [w for w, _ in scored[:3]]
353
+ labels[comm_id] = ", ".join(top) if top else f"Community {comm_id}"
354
+
355
+ # Store in config table
356
+ try:
357
+ key = "community_labels_" + profile_id
358
+ value = json.dumps(labels)
359
+ self._db.execute(
360
+ "INSERT OR REPLACE INTO config (key, value, updated_at) "
361
+ "VALUES (?, ?, datetime('now'))",
362
+ (key, value),
363
+ )
364
+ except Exception as exc:
365
+ logger.warning("Failed to store community labels: %s", exc)
366
+
367
+ return labels
368
+
369
+ # ── v3.4.1: Bridge Score Detection ───────────────────────────
370
+
371
+ def compute_bridge_scores(self, graph: Any) -> dict[str, float]:
372
+ """Identify bridge nodes via betweenness centrality.
373
+
374
+ Returns dict mapping node_id to bridge_score (0.0 to 1.0).
375
+ Persisted to fact_importance.bridge_score by compute_and_store() (column added by idempotent migration).
376
+ """
377
+ import networkx as nx
378
+
379
+ if graph.number_of_nodes() <= 2:
380
+ return {}
381
+ return nx.betweenness_centrality(
382
+ graph, weight="weight", normalized=True,
383
+ )
384
+
143
385
  def _compute_degree_centrality(
144
386
  self, graph: Any,
145
387
  ) -> dict[str, float]: