superlocalmemory 3.3.29 → 3.4.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/ATTRIBUTION.md +1 -1
- package/CHANGELOG.md +3 -0
- package/LICENSE +633 -70
- package/README.md +14 -11
- package/docs/screenshots/01-dashboard-main.png +0 -0
- package/docs/screenshots/02-knowledge-graph.png +0 -0
- package/docs/screenshots/03-patterns-learning.png +0 -0
- package/docs/screenshots/04-learning-dashboard.png +0 -0
- package/docs/screenshots/05-behavioral-analysis.png +0 -0
- package/docs/screenshots/06-graph-communities.png +0 -0
- package/docs/v2-archive/ACCESSIBILITY.md +1 -1
- package/docs/v2-archive/FRAMEWORK-INTEGRATIONS.md +1 -1
- package/docs/v2-archive/MCP-MANUAL-SETUP.md +1 -1
- package/docs/v2-archive/SEARCH-ENGINE-V2.2.0.md +2 -2
- package/docs/v2-archive/SEARCH-INTEGRATION-GUIDE.md +1 -1
- package/docs/v2-archive/UNIVERSAL-INTEGRATION.md +1 -1
- package/docs/v2-archive/V2.2.0-OPTIONAL-SEARCH.md +1 -1
- package/docs/v2-archive/example_graph_usage.py +1 -1
- package/ide/configs/codex-mcp.toml +1 -1
- package/ide/integrations/langchain/README.md +1 -1
- package/ide/integrations/langchain/langchain_superlocalmemory/__init__.py +1 -1
- package/ide/integrations/langchain/langchain_superlocalmemory/chat_message_history.py +1 -1
- package/ide/integrations/langchain/pyproject.toml +2 -2
- package/ide/integrations/langchain/tests/__init__.py +1 -1
- package/ide/integrations/langchain/tests/test_chat_message_history.py +1 -1
- package/ide/integrations/langchain/tests/test_security.py +1 -1
- package/ide/integrations/llamaindex/llama_index/storage/chat_store/superlocalmemory/__init__.py +1 -1
- package/ide/integrations/llamaindex/llama_index/storage/chat_store/superlocalmemory/base.py +1 -1
- package/ide/integrations/llamaindex/pyproject.toml +2 -2
- package/ide/integrations/llamaindex/tests/__init__.py +1 -1
- package/ide/integrations/llamaindex/tests/test_chat_store.py +1 -1
- package/ide/integrations/llamaindex/tests/test_security.py +1 -1
- package/ide/skills/slm-build-graph/SKILL.md +3 -3
- package/ide/skills/slm-list-recent/SKILL.md +3 -3
- package/ide/skills/slm-recall/SKILL.md +3 -3
- package/ide/skills/slm-remember/SKILL.md +3 -3
- package/ide/skills/slm-show-patterns/SKILL.md +3 -3
- package/ide/skills/slm-status/SKILL.md +3 -3
- package/ide/skills/slm-switch-profile/SKILL.md +3 -3
- package/package.json +3 -3
- package/pyproject.toml +3 -3
- package/src/superlocalmemory/core/engine_wiring.py +5 -1
- package/src/superlocalmemory/core/graph_analyzer.py +254 -12
- package/src/superlocalmemory/learning/consolidation_worker.py +240 -52
- package/src/superlocalmemory/retrieval/entity_channel.py +135 -4
- package/src/superlocalmemory/retrieval/spreading_activation.py +45 -0
- package/src/superlocalmemory/server/api.py +9 -1
- package/src/superlocalmemory/server/routes/behavioral.py +8 -4
- package/src/superlocalmemory/server/routes/chat.py +320 -0
- package/src/superlocalmemory/server/routes/insights.py +368 -0
- package/src/superlocalmemory/server/routes/learning.py +106 -6
- package/src/superlocalmemory/server/routes/memories.py +20 -9
- package/src/superlocalmemory/server/routes/stats.py +25 -3
- package/src/superlocalmemory/server/routes/timeline.py +252 -0
- package/src/superlocalmemory/server/routes/v3_api.py +161 -0
- package/src/superlocalmemory/server/ui.py +8 -0
- package/src/superlocalmemory/ui/index.html +168 -58
- package/src/superlocalmemory/ui/js/graph-event-bus.js +83 -0
- package/src/superlocalmemory/ui/js/graph-filters.js +1 -1
- package/src/superlocalmemory/ui/js/knowledge-graph.js +942 -0
- package/src/superlocalmemory/ui/js/memory-chat.js +344 -0
- package/src/superlocalmemory/ui/js/memory-timeline.js +265 -0
- package/src/superlocalmemory/ui/js/quick-actions.js +334 -0
- package/src/superlocalmemory.egg-info/PKG-INFO +597 -0
- package/src/superlocalmemory.egg-info/SOURCES.txt +287 -0
- package/src/superlocalmemory.egg-info/dependency_links.txt +1 -0
- package/src/superlocalmemory.egg-info/entry_points.txt +2 -0
- package/src/superlocalmemory.egg-info/requires.txt +47 -0
- package/src/superlocalmemory.egg-info/top_level.txt +1 -0
|
@@ -0,0 +1,252 @@
|
|
|
1
|
+
# Copyright (c) 2026 Varun Pratap Bhardwaj / Qualixar
|
|
2
|
+
# Licensed under AGPL-3.0-or-later — see LICENSE file
|
|
3
|
+
# Part of SuperLocalMemory v3.4.1 | https://qualixar.com
|
|
4
|
+
|
|
5
|
+
"""Memory Timeline API — unified event stream from 3 sources.
|
|
6
|
+
|
|
7
|
+
Merges atomic_facts, temporal_events, and consolidation_log
|
|
8
|
+
into a single time-ordered event list for D3 timeline visualization.
|
|
9
|
+
"""
|
|
10
|
+
|
|
11
|
+
from __future__ import annotations
|
|
12
|
+
|
|
13
|
+
import logging
|
|
14
|
+
import re
|
|
15
|
+
import sqlite3
|
|
16
|
+
from typing import Any
|
|
17
|
+
|
|
18
|
+
from fastapi import APIRouter, Query
|
|
19
|
+
from fastapi.responses import JSONResponse
|
|
20
|
+
from superlocalmemory.server.routes.helpers import DB_PATH, get_active_profile
|
|
21
|
+
|
|
22
|
+
logger = logging.getLogger(__name__)
|
|
23
|
+
|
|
24
|
+
router = APIRouter(prefix="/api/v3/timeline", tags=["timeline"])
|
|
25
|
+
|
|
26
|
+
VALID_RANGES = {"1d", "7d", "30d", "90d", "365d"}
|
|
27
|
+
VALID_GROUP_BY = {"category", "community"}
|
|
28
|
+
INTERNAL_CEILING = 5000
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
def _parse_range(range_str: str) -> str | None:
|
|
32
|
+
"""Parse range like '7d' into SQLite modifier '-7 days'."""
|
|
33
|
+
m = re.match(r"^(\d+)d$", range_str)
|
|
34
|
+
if not m or range_str not in VALID_RANGES:
|
|
35
|
+
return None
|
|
36
|
+
return f"-{m.group(1)} days"
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
@router.get("/")
async def get_timeline(
    range: str = "7d",
    group_by: str = "category",
    limit: int = Query(default=1000, ge=1, le=2000),
    offset: int = Query(default=0, ge=0),
    profile: str = "",
):
    """Get unified timeline events from all memory sources.

    Merges atomic_facts, temporal_events and consolidation_log rows for
    the requested (or active) profile into one timestamp-descending
    list, paginates it, then enriches the page with community, trust
    and retention metadata where available.

    Returns a 400 JSONResponse for an invalid ``range`` or ``group_by``;
    otherwise a dict with range/group_by/total_available/count/offset/events.
    """
    modifier = _parse_range(range)
    if modifier is None:
        return JSONResponse(
            {"error": f"Invalid range: '{range}'. Valid: {', '.join(sorted(VALID_RANGES))}"},
            status_code=400,
        )
    if group_by not in VALID_GROUP_BY:
        return JSONResponse(
            {"error": f"Invalid group_by: '{group_by}'. Valid: {', '.join(sorted(VALID_GROUP_BY))}"},
            status_code=400,
        )

    # One canonical "nothing to report" payload (was duplicated inline).
    empty = {
        "range": range, "group_by": group_by, "count": 0,
        "events": [], "total_available": 0, "offset": 0,
    }

    pid = profile or get_active_profile()
    if not DB_PATH.exists():
        return empty

    conn = sqlite3.connect(str(DB_PATH))
    conn.row_factory = sqlite3.Row

    # try/finally guarantees the connection is released even when an
    # unexpected error escapes below (the original leaked it in that case).
    try:
        try:
            start_date = conn.execute("SELECT datetime('now', ?)", (modifier,)).fetchone()[0]
        except Exception:
            return empty

        events: list[dict] = []

        # 1. Atomic facts
        try:
            rows = conn.execute(
                "SELECT fact_id, content, fact_type, "
                "REPLACE(created_at, ' ', 'T') || 'Z' AS created_at, "
                "confidence, session_id "
                "FROM atomic_facts "
                "WHERE profile_id = ? AND created_at >= ? AND lifecycle = 'active' "
                "ORDER BY created_at DESC LIMIT ?",
                (pid, start_date, INTERNAL_CEILING),
            ).fetchall()
            for r in rows:
                d = dict(r)
                events.append({
                    "id": d["fact_id"],
                    "type": "fact_created",
                    "timestamp": d["created_at"],
                    # The selected column is always a present key, so a
                    # dict.get default never fired; NULL now falls back too.
                    "category": d.get("fact_type") or "semantic",
                    "community_id": None,
                    "content_preview": (d.get("content") or "")[:100],
                    "trust_score": None,
                    "lifecycle_zone": None,
                    "retention_score": None,
                    # Same fix: NULL session_id becomes "" instead of None.
                    "session_id": d.get("session_id") or "",
                    "source": "atomic_facts",
                })
        except Exception as exc:
            logger.debug("Timeline atomic_facts query failed: %s", exc)

        # 2. Temporal events
        try:
            rows = conn.execute(
                "SELECT event_id, entity_id, fact_id, "
                "observation_date, referenced_date, interval_start, description, "
                "REPLACE(COALESCE(observation_date, referenced_date, interval_start), ' ', 'T') || 'Z' "
                "AS event_date "
                "FROM temporal_events "
                "WHERE profile_id = ? "
                "AND COALESCE(observation_date, referenced_date, interval_start) >= ? "
                "ORDER BY event_date DESC LIMIT ?",
                (pid, start_date, INTERNAL_CEILING),
            ).fetchall()
            for r in rows:
                d = dict(r)
                events.append({
                    "id": d.get("event_id", ""),
                    "type": "temporal_event",
                    "timestamp": d.get("event_date", ""),
                    "category": "temporal",
                    "community_id": None,
                    "content_preview": (d.get("description") or "")[:100],
                    "trust_score": None,
                    "lifecycle_zone": None,
                    "retention_score": None,
                    "session_id": None,
                    "source": "temporal_events",
                })
        except Exception as exc:
            logger.debug("Timeline temporal_events query failed: %s", exc)

        # 3. Consolidation log
        try:
            rows = conn.execute(
                "SELECT action_id, action_type, new_fact_id, existing_fact_id, reason, "
                "REPLACE(timestamp, ' ', 'T') || 'Z' AS timestamp "
                "FROM consolidation_log "
                "WHERE profile_id = ? AND timestamp >= ? "
                "ORDER BY timestamp DESC LIMIT ?",
                (pid, start_date, INTERNAL_CEILING),
            ).fetchall()
            for r in rows:
                d = dict(r)
                preview = f"{d.get('action_type', '')}: {(d.get('reason') or '')[:80]}"
                events.append({
                    "id": d.get("action_id", ""),
                    "type": "consolidation",
                    "timestamp": d.get("timestamp", ""),
                    "category": "consolidation",
                    "community_id": None,
                    "content_preview": preview[:100],
                    "trust_score": None,
                    "lifecycle_zone": None,
                    "retention_score": None,
                    "session_id": None,
                    "source": "consolidation_log",
                })
        except Exception as exc:
            logger.debug("Timeline consolidation_log query failed: %s", exc)

        # Sort merged events by timestamp desc (ISO strings sort lexically).
        events.sort(key=lambda e: e.get("timestamp", ""), reverse=True)
        total_available = len(events)

        # Paginate
        events = events[offset:offset + limit]

        # Enrich with community_id if group_by=community
        fact_ids = [e["id"] for e in events if e["source"] == "atomic_facts"]
        if group_by == "community" and fact_ids:
            _enrich_communities(conn, pid, fact_ids, events)

        # Enrich with trust scores + retention
        if fact_ids:
            _enrich_trust(conn, pid, fact_ids, events)
            _enrich_retention(conn, pid, fact_ids, events)

        return {
            "range": range,
            "group_by": group_by,
            "total_available": total_available,
            "count": len(events),
            "offset": offset,
            "events": events,
        }
    finally:
        conn.close()
|
|
191
|
+
|
|
192
|
+
|
|
193
|
+
def _enrich_communities(
|
|
194
|
+
conn: sqlite3.Connection, pid: str, fact_ids: list[str], events: list[dict],
|
|
195
|
+
) -> None:
|
|
196
|
+
"""Add community_id from fact_importance."""
|
|
197
|
+
try:
|
|
198
|
+
placeholders = ",".join("?" * len(fact_ids))
|
|
199
|
+
sql = (
|
|
200
|
+
"SELECT fact_id, community_id, pagerank_score "
|
|
201
|
+
"FROM fact_importance WHERE profile_id = ? AND fact_id IN ("
|
|
202
|
+
+ placeholders + ")"
|
|
203
|
+
)
|
|
204
|
+
rows = conn.execute(sql, (pid, *fact_ids)).fetchall()
|
|
205
|
+
comm_map = {dict(r)["fact_id"]: dict(r).get("community_id") for r in rows}
|
|
206
|
+
for e in events:
|
|
207
|
+
if e["id"] in comm_map:
|
|
208
|
+
e["community_id"] = comm_map[e["id"]]
|
|
209
|
+
except Exception:
|
|
210
|
+
pass
|
|
211
|
+
|
|
212
|
+
|
|
213
|
+
def _enrich_trust(
|
|
214
|
+
conn: sqlite3.Connection, pid: str, fact_ids: list[str], events: list[dict],
|
|
215
|
+
) -> None:
|
|
216
|
+
"""Add trust_score from trust_scores table."""
|
|
217
|
+
try:
|
|
218
|
+
placeholders = ",".join("?" * len(fact_ids))
|
|
219
|
+
sql = (
|
|
220
|
+
"SELECT target_id AS fact_id, trust_score "
|
|
221
|
+
"FROM trust_scores WHERE profile_id = ? AND target_type = 'fact' "
|
|
222
|
+
"AND target_id IN (" + placeholders + ")"
|
|
223
|
+
)
|
|
224
|
+
rows = conn.execute(sql, (pid, *fact_ids)).fetchall()
|
|
225
|
+
trust_map = {dict(r)["fact_id"]: round(float(dict(r).get("trust_score", 0)), 3) for r in rows}
|
|
226
|
+
for e in events:
|
|
227
|
+
if e["id"] in trust_map:
|
|
228
|
+
e["trust_score"] = trust_map[e["id"]]
|
|
229
|
+
except Exception:
|
|
230
|
+
pass
|
|
231
|
+
|
|
232
|
+
|
|
233
|
+
def _enrich_retention(
|
|
234
|
+
conn: sqlite3.Connection, pid: str, fact_ids: list[str], events: list[dict],
|
|
235
|
+
) -> None:
|
|
236
|
+
"""Add lifecycle_zone + retention_score from fact_retention."""
|
|
237
|
+
try:
|
|
238
|
+
placeholders = ",".join("?" * len(fact_ids))
|
|
239
|
+
sql = (
|
|
240
|
+
"SELECT fact_id, lifecycle_zone, retention_score "
|
|
241
|
+
"FROM fact_retention WHERE profile_id = ? AND fact_id IN ("
|
|
242
|
+
+ placeholders + ")"
|
|
243
|
+
)
|
|
244
|
+
rows = conn.execute(sql, (pid, *fact_ids)).fetchall()
|
|
245
|
+
ret_map = {dict(r)["fact_id"]: dict(r) for r in rows}
|
|
246
|
+
for e in events:
|
|
247
|
+
d = ret_map.get(e["id"])
|
|
248
|
+
if d:
|
|
249
|
+
e["lifecycle_zone"] = d.get("lifecycle_zone")
|
|
250
|
+
e["retention_score"] = round(float(d.get("retention_score", 0)), 3)
|
|
251
|
+
except Exception:
|
|
252
|
+
pass
|
|
@@ -1035,6 +1035,17 @@ async def trigger_consolidation(request: Request):
|
|
|
1035
1035
|
engine = ConsolidationEngine(db=db, config=config.consolidation, slm_config=config)
|
|
1036
1036
|
result = engine.consolidate(profile_id=pid, lightweight=lightweight)
|
|
1037
1037
|
|
|
1038
|
+
# v3.4.1: Auto-trigger behavioral pattern mining after consolidation
|
|
1039
|
+
try:
|
|
1040
|
+
from superlocalmemory.learning.consolidation_worker import ConsolidationWorker
|
|
1041
|
+
learning_db = config.base_dir / "learning.db"
|
|
1042
|
+
cw = ConsolidationWorker(str(config.db_path), str(learning_db))
|
|
1043
|
+
pattern_count = cw._generate_patterns(pid, False)
|
|
1044
|
+
result["patterns_mined"] = pattern_count
|
|
1045
|
+
logger.info("Auto-mined %d patterns after consolidation", pattern_count)
|
|
1046
|
+
except Exception as exc:
|
|
1047
|
+
logger.debug("Pattern mining after consolidation failed: %s", exc)
|
|
1048
|
+
|
|
1038
1049
|
return {"success": True, **result}
|
|
1039
1050
|
except Exception as e:
|
|
1040
1051
|
return JSONResponse({"error": str(e)}, status_code=500)
|
|
@@ -1561,6 +1572,156 @@ async def process_health(request: Request):
|
|
|
1561
1572
|
|
|
1562
1573
|
# ── 1g. GET /api/v3/v33/overview ─────────────────────────────
|
|
1563
1574
|
|
|
1575
|
+
# ── v3.4.1: Graph Communities ──────────────────────────────────
|
|
1576
|
+
|
|
1577
|
+
@router.get("/graph/communities")
async def get_graph_communities(request: Request, profile: str = ""):
    """Get community assignments with TF-IDF labels, entities, and colors.

    v3.4.1: Uses TF-IDF labels from config table (computed by GraphAnalyzer
    at consolidation time). Falls back to inline word frequency if labels
    not yet computed.

    Fixes: the SQLite connection is now closed in a ``finally`` block
    (it leaked whenever a query raised and the outer handler returned a
    500), and the label/entity extraction is factored into helpers.
    """
    try:
        from superlocalmemory.server.routes.helpers import get_active_profile, DB_PATH
        import sqlite3
        pid = profile or get_active_profile()

        if not DB_PATH.exists():
            return {"communities": [], "total": 0}

        conn = sqlite3.connect(str(DB_PATH))
        conn.row_factory = sqlite3.Row
        try:
            # Get community member counts and average pagerank
            comm_rows = conn.execute(
                "SELECT community_id, COUNT(*) AS member_count, "
                "AVG(pagerank_score) AS pagerank_avg "
                "FROM fact_importance "
                "WHERE profile_id = ? AND community_id IS NOT NULL "
                "GROUP BY community_id ORDER BY member_count DESC",
                (pid,),
            ).fetchall()

            if not comm_rows:
                return {"communities": [], "total": 0}

            tfidf_labels = _load_tfidf_labels(conn, pid)

            cluster_colors = [
                '#667eea', '#764ba2', '#43e97b', '#38f9d7',
                '#4facfe', '#00f2fe', '#f093fb', '#f5576c',
                '#fa709a', '#fee140', '#30cfd0', '#330867',
            ]

            communities = []
            for row in comm_rows:
                d = dict(row)
                comm_id = d["community_id"]
                # Prefer the precomputed TF-IDF label; fall back to an
                # inline word-frequency label over the top facts.
                label = tfidf_labels.get(str(comm_id), "") or _fallback_label(conn, pid, comm_id)
                communities.append({
                    "community_id": comm_id,
                    "label": label,
                    "member_count": d["member_count"],
                    "top_entities": _top_community_entities(conn, pid, comm_id),
                    "color": cluster_colors[comm_id % len(cluster_colors)],
                    "pagerank_avg": round(float(d["pagerank_avg"] or 0), 4),
                })

            return {"communities": communities, "total": len(communities)}
        finally:
            conn.close()

    except Exception as e:
        return JSONResponse({"error": str(e)}, status_code=500)


def _load_tfidf_labels(conn, pid: str) -> dict:
    """Read precomputed community labels for *pid* from the config table.

    Returns an empty dict when the key is absent or unreadable.
    """
    try:
        label_row = conn.execute(
            "SELECT value FROM config WHERE key = ?",
            ("community_labels_" + pid,),
        ).fetchone()
        if label_row:
            import json as _json
            return _json.loads(dict(label_row)["value"])
    except Exception:
        pass
    return {}


def _fallback_label(conn, pid: str, comm_id) -> str:
    """Label a community by word frequency over its top-20 facts.

    Returns 'Community <id>' when no usable words are found or the
    query fails.
    """
    try:
        fact_rows = conn.execute(
            "SELECT af.content FROM fact_importance fi "
            "JOIN atomic_facts af ON fi.fact_id = af.fact_id "
            "WHERE fi.profile_id = ? AND fi.community_id = ? "
            "ORDER BY fi.pagerank_score DESC LIMIT 20",
            (pid, comm_id),
        ).fetchall()
        from collections import defaultdict as _ddict
        wf = _ddict(int)
        sw = {"the", "a", "an", "is", "was", "are", "to", "of",
              "in", "for", "on", "with", "at", "by", "from",
              "and", "but", "or", "not", "it", "this", "that",
              "i", "we", "they", "he", "she", "you", "my"}
        for fr in fact_rows:
            for w in (dict(fr).get("content", "")).lower().split():
                w = w.strip(".,;:!?\"'()[]{}")
                if len(w) > 2 and w not in sw:
                    wf[w] += 1
        top = sorted(wf.items(), key=lambda x: x[1], reverse=True)[:3]
        return ", ".join(w for w, _ in top) if top else f"Community {comm_id}"
    except Exception:
        return f"Community {comm_id}"


def _top_community_entities(conn, pid: str, comm_id) -> list:
    """Top-5 canonical entities among the community's top-10 facts.

    Returns an empty list on any failure; rows with unparseable
    canonical_entities_json are skipped individually.
    """
    try:
        ent_rows = conn.execute(
            "SELECT af.canonical_entities_json FROM fact_importance fi "
            "JOIN atomic_facts af ON fi.fact_id = af.fact_id "
            "WHERE fi.profile_id = ? AND fi.community_id = ? "
            "ORDER BY fi.pagerank_score DESC LIMIT 10",
            (pid, comm_id),
        ).fetchall()
        import json as _json2
        entity_counts: dict = {}
        for er in ent_rows:
            raw = dict(er).get("canonical_entities_json", "")
            if raw:
                try:
                    for ent in _json2.loads(raw):
                        entity_counts[ent] = entity_counts.get(ent, 0) + 1
                except (ValueError, TypeError):
                    pass
        return sorted(
            entity_counts, key=entity_counts.get, reverse=True,
        )[:5]
    except Exception:
        return []
|
|
1703
|
+
|
|
1704
|
+
|
|
1705
|
+
@router.post("/graph/run-communities")
async def run_community_detection(request: Request):
    """Trigger community detection manually (runs GraphAnalyzer)."""
    try:
        # Project imports resolved per-request, matching sibling routes.
        from superlocalmemory.core.graph_analyzer import GraphAnalyzer
        from superlocalmemory.server.routes.helpers import get_active_profile, DB_PATH
        from superlocalmemory.storage import schema as _schema
        from superlocalmemory.storage.database import DatabaseManager

        profile_id = get_active_profile()
        manager = DatabaseManager(DB_PATH)
        manager.initialize(_schema)

        stats = GraphAnalyzer(manager).compute_and_store(profile_id)
        return {"success": True, **stats}
    except Exception as e:
        return JSONResponse({"error": str(e)}, status_code=500)
|
|
1723
|
+
|
|
1724
|
+
|
|
1564
1725
|
@router.get("/v33/overview")
|
|
1565
1726
|
async def v33_overview(request: Request, profile: str = ""):
|
|
1566
1727
|
"""Get SLM 3.3 feature overview -- all new capabilities at a glance."""
|
|
@@ -152,6 +152,14 @@ def create_app() -> FastAPI:
|
|
|
152
152
|
from superlocalmemory.server.routes.v3_api import router as v3_router
|
|
153
153
|
application.include_router(v3_router)
|
|
154
154
|
|
|
155
|
+
# v3.4.1: Chat SSE + Insights + Timeline endpoints
|
|
156
|
+
for _module_name_v341 in ("chat",):
|
|
157
|
+
try:
|
|
158
|
+
_mod_v341 = __import__(f"superlocalmemory.server.routes.{_module_name_v341}", fromlist=["router"])
|
|
159
|
+
application.include_router(_mod_v341.router)
|
|
160
|
+
except (ImportError, Exception) as _exc:
|
|
161
|
+
logger.warning("Optional router %s failed: %s", _module_name_v341, _exc)
|
|
162
|
+
|
|
155
163
|
# Graceful optional routers
|
|
156
164
|
for _module_name in ("learning", "lifecycle", "behavioral", "compliance"):
|
|
157
165
|
try:
|