superlocalmemory 3.4.1 → 3.4.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (47) hide show
  1. package/README.md +9 -12
  2. package/package.json +1 -1
  3. package/pyproject.toml +11 -2
  4. package/scripts/postinstall.js +26 -7
  5. package/src/superlocalmemory/cli/commands.py +71 -60
  6. package/src/superlocalmemory/cli/daemon.py +184 -64
  7. package/src/superlocalmemory/cli/main.py +25 -2
  8. package/src/superlocalmemory/cli/service_installer.py +367 -0
  9. package/src/superlocalmemory/cli/setup_wizard.py +150 -9
  10. package/src/superlocalmemory/core/config.py +28 -0
  11. package/src/superlocalmemory/core/consolidation_engine.py +38 -1
  12. package/src/superlocalmemory/core/engine.py +9 -0
  13. package/src/superlocalmemory/core/health_monitor.py +313 -0
  14. package/src/superlocalmemory/core/reranker_worker.py +19 -5
  15. package/src/superlocalmemory/ingestion/__init__.py +13 -0
  16. package/src/superlocalmemory/ingestion/adapter_manager.py +234 -0
  17. package/src/superlocalmemory/ingestion/base_adapter.py +177 -0
  18. package/src/superlocalmemory/ingestion/calendar_adapter.py +340 -0
  19. package/src/superlocalmemory/ingestion/credentials.py +118 -0
  20. package/src/superlocalmemory/ingestion/gmail_adapter.py +369 -0
  21. package/src/superlocalmemory/ingestion/parsers.py +100 -0
  22. package/src/superlocalmemory/ingestion/transcript_adapter.py +156 -0
  23. package/src/superlocalmemory/learning/consolidation_worker.py +47 -1
  24. package/src/superlocalmemory/learning/entity_compiler.py +377 -0
  25. package/src/superlocalmemory/mcp/server.py +32 -3
  26. package/src/superlocalmemory/mcp/tools_mesh.py +249 -0
  27. package/src/superlocalmemory/mesh/__init__.py +12 -0
  28. package/src/superlocalmemory/mesh/broker.py +344 -0
  29. package/src/superlocalmemory/retrieval/entity_channel.py +12 -6
  30. package/src/superlocalmemory/server/api.py +6 -7
  31. package/src/superlocalmemory/server/routes/adapters.py +63 -0
  32. package/src/superlocalmemory/server/routes/entity.py +151 -0
  33. package/src/superlocalmemory/server/routes/ingest.py +110 -0
  34. package/src/superlocalmemory/server/routes/mesh.py +186 -0
  35. package/src/superlocalmemory/server/unified_daemon.py +693 -0
  36. package/src/superlocalmemory/storage/schema_v343.py +229 -0
  37. package/src/superlocalmemory/ui/css/neural-glass.css +1588 -0
  38. package/src/superlocalmemory/ui/index.html +134 -4
  39. package/src/superlocalmemory/ui/js/memory-chat.js +28 -1
  40. package/src/superlocalmemory/ui/js/ng-entities.js +272 -0
  41. package/src/superlocalmemory/ui/js/ng-health.js +208 -0
  42. package/src/superlocalmemory/ui/js/ng-ingestion.js +203 -0
  43. package/src/superlocalmemory/ui/js/ng-mesh.js +311 -0
  44. package/src/superlocalmemory/ui/js/ng-shell.js +471 -0
  45. package/src/superlocalmemory.egg-info/PKG-INFO +18 -14
  46. package/src/superlocalmemory.egg-info/SOURCES.txt +26 -0
  47. package/src/superlocalmemory.egg-info/requires.txt +9 -1
@@ -0,0 +1,63 @@
1
+ # Copyright (c) 2026 Varun Pratap Bhardwaj / Qualixar
2
+ # Licensed under the Elastic License 2.0 - see LICENSE file
3
+ # Part of SuperLocalMemory V3 | https://qualixar.com | https://varunpratap.com
4
+
5
+ """Ingestion adapter management API — enable/disable/start/stop from dashboard.
6
+
7
+ v3.4.4: Users can manage Gmail, Calendar, Transcript adapters entirely from
8
+ the dashboard UI — no CLI needed. The best product experience.
9
+
10
+ Endpoints:
11
+ GET /api/adapters — list all adapters with status
12
+ POST /api/adapters/enable — enable an adapter
13
+ POST /api/adapters/disable — disable an adapter
14
+ POST /api/adapters/start — start an enabled adapter
15
+ POST /api/adapters/stop — stop a running adapter
16
+ """
17
+
18
+ from __future__ import annotations
19
+
20
+ from fastapi import APIRouter, Request
21
+ from pydantic import BaseModel
22
+
23
router = APIRouter(tags=["adapters"])


class AdapterAction(BaseModel):
    """Request body shared by the enable/disable/start/stop endpoints."""

    # Adapter name as registered with adapter_manager
    # (presumably "gmail", "calendar", "transcript" — TODO confirm valid values).
    name: str
28
+
29
+
30
@router.get("/api/adapters")
async def list_adapters_api():
    """Return every registered adapter together with its enabled/running status."""
    # Deferred import keeps startup light and avoids import cycles.
    from superlocalmemory.ingestion import adapter_manager

    return {"adapters": adapter_manager.list_adapters()}
36
+
37
+
38
@router.post("/api/adapters/enable")
async def enable_adapter_api(body: AdapterAction):
    """Mark an adapter as enabled (it is not launched here)."""
    from superlocalmemory.ingestion import adapter_manager

    return adapter_manager.enable_adapter(body.name)


@router.post("/api/adapters/disable")
async def disable_adapter_api(body: AdapterAction):
    """Mark an adapter as disabled, stopping it if it is running."""
    from superlocalmemory.ingestion import adapter_manager

    return adapter_manager.disable_adapter(body.name)


@router.post("/api/adapters/start")
async def start_adapter_api(body: AdapterAction):
    """Launch the adapter's subprocess."""
    from superlocalmemory.ingestion import adapter_manager

    return adapter_manager.start_adapter(body.name)


@router.post("/api/adapters/stop")
async def stop_adapter_api(body: AdapterAction):
    """Stop the adapter's running subprocess."""
    from superlocalmemory.ingestion import adapter_manager

    return adapter_manager.stop_adapter(body.name)
@@ -0,0 +1,151 @@
1
+ # Copyright (c) 2026 Varun Pratap Bhardwaj / Qualixar
2
+ # Licensed under the Elastic License 2.0 - see LICENSE file
3
+ # Part of SuperLocalMemory V3 | https://qualixar.com | https://varunpratap.com
4
+
5
+ """Entity compilation API routes — view and recompile entity summaries."""
6
+
7
+ from __future__ import annotations
8
+
9
+ from fastapi import APIRouter, HTTPException, Request, Query
10
+
11
# All entity endpoints are served under /api/entity.
router = APIRouter(prefix="/api/entity", tags=["entity"])
12
+
13
+
14
@router.get("/list")
async def list_entities(
    request: Request,
    profile: str = Query(default="default"),
    limit: int = Query(default=100, ge=1, le=1000),
    offset: int = Query(default=0, ge=0),
):
    """List entities for a profile, ordered by fact count (descending).

    Returns a paginated payload: for each entity its canonical name, type,
    fact count, first/last seen timestamps, a 200-char summary preview, and
    compilation metadata (joined from entity_profiles when present).

    Raises:
        HTTPException 503: the memory engine has not been initialized yet.
    """
    engine = request.app.state.engine
    if engine is None:
        raise HTTPException(503, detail="Engine not initialized")

    # Fix: the original also imported `json` here, which this function never uses.
    import sqlite3

    # NOTE(review): a fresh connection per request is simple but serializes on
    # SQLite file locks under load — consider a shared read-only connection.
    conn = sqlite3.connect(str(engine._config.db_path))
    conn.row_factory = sqlite3.Row
    try:
        total = conn.execute(
            "SELECT COUNT(*) FROM canonical_entities WHERE profile_id = ?",
            (profile,),
        ).fetchone()[0]

        rows = conn.execute("""
            SELECT ce.entity_id, ce.canonical_name, ce.entity_type,
                   ce.fact_count, ce.first_seen, ce.last_seen,
                   ep.knowledge_summary, ep.compiled_truth,
                   ep.compilation_confidence, ep.last_compiled_at
            FROM canonical_entities ce
            LEFT JOIN entity_profiles ep
                ON ce.entity_id = ep.entity_id AND ep.profile_id = ce.profile_id
            WHERE ce.profile_id = ?
            ORDER BY ce.fact_count DESC
            LIMIT ? OFFSET ?
        """, (profile, limit, offset)).fetchall()

        entities = []
        for r in rows:
            summary = r["knowledge_summary"] or ""
            entities.append({
                "entity_id": r["entity_id"],
                "name": r["canonical_name"],
                "type": r["entity_type"] or "unknown",
                "fact_count": r["fact_count"] or 0,
                "first_seen": r["first_seen"],
                "last_seen": r["last_seen"],
                # ""[:200] is "" — no need for a conditional here.
                "summary_preview": summary[:200],
                "has_compiled_truth": bool(r["compiled_truth"]),
                # 0.5 is the neutral fallback when no confidence was recorded.
                "confidence": r["compilation_confidence"] or 0.5,
                "last_compiled_at": r["last_compiled_at"],
            })

        return {"entities": entities, "total": total, "limit": limit, "offset": offset}
    finally:
        conn.close()
68
+
69
+
70
@router.get("/{entity_name}")
async def get_entity(
    entity_name: str,
    request: Request,
    profile: str = Query(default="default"),
    project: str = Query(default=""),
):
    """Return the compiled truth, knowledge summary and timeline for one entity.

    Lookup is case-insensitive on the canonical name and scoped to the given
    profile and project. 404 when no matching profile row exists.
    """
    engine = request.app.state.engine
    if engine is None:
        raise HTTPException(503, detail="Engine not initialized")

    import json
    import sqlite3

    conn = sqlite3.connect(str(engine._config.db_path))
    conn.row_factory = sqlite3.Row
    try:
        # Case-insensitive match on the canonical name.
        rec = conn.execute("""
            SELECT ep.compiled_truth, ep.timeline, ep.fact_ids_json,
                   ep.last_compiled_at, ep.compilation_confidence,
                   ep.knowledge_summary, ce.entity_type
            FROM entity_profiles ep
            JOIN canonical_entities ce ON ep.entity_id = ce.entity_id
            WHERE LOWER(ce.canonical_name) = LOWER(?)
              AND ep.profile_id = ?
              AND ep.project_name = ?
        """, (entity_name, profile, project)).fetchone()

        if rec is None:
            raise HTTPException(404, detail=f"Entity '{entity_name}' not found")

        # JSON columns may be NULL; decode defensively to empty collections.
        timeline = json.loads(rec["timeline"]) if rec["timeline"] else []
        fact_ids = json.loads(rec["fact_ids_json"]) if rec["fact_ids_json"] else []

        return {
            "entity_name": entity_name,
            "entity_type": rec["entity_type"],
            "compiled_truth": rec["compiled_truth"] or "",
            "knowledge_summary": rec["knowledge_summary"] or "",
            "timeline": timeline,
            "source_fact_ids": fact_ids,
            "last_compiled_at": rec["last_compiled_at"],
            "confidence": rec["compilation_confidence"],
        }
    finally:
        conn.close()
114
+
115
+
116
@router.post("/{entity_name}/recompile")
async def recompile_entity(
    entity_name: str,
    request: Request,
    profile: str = Query(default="default"),
    project: str = Query(default=""),
):
    """Synchronously recompile one entity's profile and return the result."""
    engine = request.app.state.engine
    if engine is None:
        raise HTTPException(503, detail="Engine not initialized")

    import sqlite3

    db_path = str(engine._config.db_path)
    conn = sqlite3.connect(db_path)
    conn.row_factory = sqlite3.Row
    try:
        # Resolve the entity by case-insensitive canonical name.
        target = conn.execute(
            "SELECT entity_id, canonical_name, entity_type FROM canonical_entities "
            "WHERE LOWER(canonical_name) = LOWER(?) AND profile_id = ?",
            (entity_name, profile),
        ).fetchone()

        if target is None:
            raise HTTPException(404, detail=f"Entity '{entity_name}' not found")

        from superlocalmemory.learning.entity_compiler import EntityCompiler

        outcome = EntityCompiler(db_path, engine._config).compile_entity(
            profile, project, target["entity_id"], target["canonical_name"],
        )

        if not outcome:
            return {"ok": False, "reason": "no facts to compile"}
        return {"ok": True, **outcome}
    finally:
        conn.close()
@@ -0,0 +1,110 @@
1
+ # Copyright (c) 2026 Varun Pratap Bhardwaj / Qualixar
2
+ # Licensed under the Elastic License 2.0 - see LICENSE file
3
+ # Part of SuperLocalMemory V3 | https://qualixar.com | https://varunpratap.com
4
+
5
+ """Ingestion endpoint — accepts data from external adapters.
6
+
7
+ POST /ingest with {content, source_type, dedup_key, metadata}.
8
+ Deduplicates by source_type + dedup_key. Stores via MemoryEngine.
9
+ Admission control: max 10 concurrent ingestions (HTTP 429 on overflow).
10
+
11
+ Part of Qualixar | Author: Varun Pratap Bhardwaj
12
+ """
13
+
14
+ from __future__ import annotations
15
+
16
+ import json
17
+ import sqlite3
18
+ import threading
19
+ from datetime import datetime, timezone
20
+
21
+ from fastapi import APIRouter, HTTPException, Request
22
+ from pydantic import BaseModel
23
+
24
router = APIRouter(tags=["ingestion"])

# Admission control: at most this many /ingest requests in flight at once;
# additional requests are rejected with HTTP 429 and a Retry-After header.
_MAX_CONCURRENT = 10
# Count of in-flight ingestions; read and written only under _active_lock.
_active_count = 0
_active_lock = threading.Lock()
29
+
30
+
31
class IngestRequest(BaseModel):
    """Body for POST /ingest — one item pushed by an external adapter."""

    content: str      # raw text to store; empty is rejected with 400
    source_type: str  # originating adapter, e.g. "gmail" — TODO confirm naming
    dedup_key: str    # per-item unique key; (source_type, dedup_key) dedups
    # Extra fields merged into the stored fact's metadata. A mutable default
    # is safe on a pydantic model: defaults are copied per instance.
    metadata: dict = {}
36
+
37
+
38
@router.post("/ingest")
async def ingest(req: IngestRequest, request: Request):
    """Ingest content from an external adapter.

    Deduplicates by (source_type, dedup_key): when a matching ingestion_log
    row exists the content is not stored again. At most _MAX_CONCURRENT
    requests are processed concurrently; extras get HTTP 429.

    Raises:
        HTTPException 503: engine not initialized.
        HTTPException 400: a required field is empty.
        HTTPException 429: admission control rejected the request.
        HTTPException 500: storage or logging failed.
    """
    global _active_count

    engine = request.app.state.engine
    if engine is None:
        raise HTTPException(503, detail="Engine not initialized")

    if not req.content:
        raise HTTPException(400, detail="content required")
    if not req.source_type:
        raise HTTPException(400, detail="source_type required")
    if not req.dedup_key:
        raise HTTPException(400, detail="dedup_key required")

    # Admission control
    with _active_lock:
        if _active_count >= _MAX_CONCURRENT:
            raise HTTPException(
                429,
                detail="Too many concurrent ingestions",
                headers={"Retry-After": "5"},
            )
        _active_count += 1

    try:
        # Dedup check.
        # NOTE(review): check-then-insert is racy for concurrent requests with
        # the same key — INSERT OR IGNORE below keeps the log unique, but the
        # content could still be stored twice via engine.store(). A UNIQUE
        # (source_type, dedup_key) constraint checked at insert time would
        # close the window.
        conn = sqlite3.connect(str(engine._config.db_path))
        try:
            existing = conn.execute(
                "SELECT id FROM ingestion_log WHERE source_type=? AND dedup_key=?",
                (req.source_type, req.dedup_key),
            ).fetchone()
            if existing:
                return {"ingested": False, "reason": "already_ingested"}
        finally:
            conn.close()

        # Store via engine. NOTE(review): this is a blocking call inside an
        # async handler; acceptable for a local daemon, but consider
        # run_in_executor if ingestion volume grows.
        metadata = {**req.metadata, "source_type": req.source_type}
        fact_ids = engine.store(req.content, metadata=metadata)

        # Record the ingestion so future duplicates are skipped.
        conn = sqlite3.connect(str(engine._config.db_path))
        try:
            conn.execute(
                "INSERT OR IGNORE INTO ingestion_log "
                "(source_type, dedup_key, fact_ids, metadata, status, ingested_at) "
                "VALUES (?, ?, ?, ?, 'ingested', ?)",
                (
                    req.source_type,
                    req.dedup_key,
                    json.dumps(fact_ids),
                    json.dumps(req.metadata),
                    datetime.now(timezone.utc).isoformat(),
                ),
            )
            conn.commit()
        finally:
            conn.close()

        return {"ingested": True, "fact_ids": fact_ids}

    except HTTPException:
        # Fix: don't let the generic handler below re-wrap deliberate HTTP
        # errors into opaque 500s.
        raise
    except Exception as exc:
        # Fix: chain the cause so the original traceback survives in logs.
        raise HTTPException(500, detail=str(exc)) from exc
    finally:
        with _active_lock:
            _active_count -= 1
@@ -0,0 +1,186 @@
1
+ # Copyright (c) 2026 Varun Pratap Bhardwaj / Qualixar
2
+ # Licensed under the Elastic License 2.0 - see LICENSE file
3
+ # Part of SuperLocalMemory V3 | https://qualixar.com | https://varunpratap.com
4
+
5
+ """SLM Mesh — FastAPI routes for P2P agent communication.
6
+
7
+ Mounted at /mesh/* in the unified daemon. Uses MeshBroker for all operations.
8
+
9
+ Part of Qualixar | Author: Varun Pratap Bhardwaj
10
+ """
11
+
12
+ from __future__ import annotations
13
+
14
+ from typing import Optional
15
+
16
+ from fastapi import APIRouter, HTTPException, Request
17
+ from pydantic import BaseModel
18
+
19
router = APIRouter(prefix="/mesh", tags=["mesh"])


# -- Request models --

class RegisterRequest(BaseModel):
    """Body for POST /mesh/register — announce a session as a mesh peer."""

    # Caller's session identifier; the route rejects empty values with 400.
    session_id: str
    # Optional free-text description of what this peer is doing.
    summary: str = ""
    # Address where the peer can be reached; defaults to loopback.
    host: str = "127.0.0.1"
    # Peer's listening port; 0 presumably means "not listening" — TODO confirm.
    port: int = 0


class DeregisterRequest(BaseModel):
    """Body for POST /mesh/deregister — remove a peer from the mesh."""

    peer_id: str  # presumably the id issued at registration — TODO confirm


class HeartbeatRequest(BaseModel):
    """Body for POST /mesh/heartbeat — keep a peer marked alive."""

    peer_id: str


class SummaryRequest(BaseModel):
    """Body for POST /mesh/summary — replace a peer's advertised summary."""

    peer_id: str
    summary: str


class SendRequest(BaseModel):
    """Body for POST /mesh/send — queue a message for another peer."""

    from_peer: str = ""  # sender id; empty default — semantics TODO confirm
    to: str              # recipient peer id
    content: str         # message payload
    # NOTE: field name shadows the builtin `type`, but it is the wire format
    # clients already send — do not rename.
    type: str = "text"


class ReadRequest(BaseModel):
    """Body for POST /mesh/inbox/{peer_id}/read — acknowledge messages."""

    message_ids: list[int]


class StateSetRequest(BaseModel):
    """Body for POST /mesh/state — set one shared key/value entry."""

    key: str
    value: str
    set_by: str  # peer id recorded as the writer of this value


class LockRequest(BaseModel):
    """Body for POST /mesh/lock — advisory file-lock operations."""

    file_path: str
    locked_by: str
    action: str  # acquire, release, query
65
+
66
+
67
+ # -- Helpers --
68
+
69
def _get_broker(request: Request):
    """Return the app-level MeshBroker, enforcing that the mesh is usable.

    Raises HTTPException 503 when the broker was never initialized, or when
    the app config exists and has mesh_enabled set to False.
    """
    broker = getattr(request.app.state, 'mesh_broker', None)
    if broker is None:
        raise HTTPException(503, detail="Mesh broker not initialized")

    config = getattr(request.app.state, 'config', None)
    if config:
        # mesh_enabled defaults to True when the attribute is absent.
        if not getattr(config, 'mesh_enabled', True):
            raise HTTPException(503, detail="Mesh disabled in config")

    return broker
78
+
79
+
80
+ # -- Routes --
81
+
82
@router.post("/register")
async def register(req: RegisterRequest, request: Request):
    """Register this session as a mesh peer."""
    mesh = _get_broker(request)
    if not req.session_id:
        raise HTTPException(400, detail="session_id required")
    return mesh.register_peer(req.session_id, req.summary, req.host, req.port)


@router.post("/deregister")
async def deregister(req: DeregisterRequest, request: Request):
    """Remove a peer from the mesh; 404 if it is unknown."""
    mesh = _get_broker(request)
    outcome = mesh.deregister_peer(req.peer_id)
    if outcome.get("ok"):
        return outcome
    raise HTTPException(404, detail=outcome.get("error", "peer not found"))


@router.get("/peers")
async def peers(request: Request):
    """List currently registered peers."""
    return {"peers": _get_broker(request).list_peers()}


@router.post("/heartbeat")
async def heartbeat(req: HeartbeatRequest, request: Request):
    """Refresh a peer's liveness; 404 if it is unknown."""
    mesh = _get_broker(request)
    outcome = mesh.heartbeat(req.peer_id)
    if outcome.get("ok"):
        return outcome
    raise HTTPException(404, detail=outcome.get("error", "peer not found"))


@router.post("/summary")
async def summary(req: SummaryRequest, request: Request):
    """Replace a peer's advertised summary; 404 if it is unknown."""
    mesh = _get_broker(request)
    outcome = mesh.update_summary(req.peer_id, req.summary)
    if outcome.get("ok"):
        return outcome
    raise HTTPException(404, detail=outcome.get("error", "peer not found"))
121
+
122
+
123
@router.post("/send")
async def send(req: SendRequest, request: Request):
    """Queue a message from one peer to another; 404 on broker failure."""
    mesh = _get_broker(request)
    outcome = mesh.send_message(req.from_peer, req.to, req.content, req.type)
    if outcome.get("ok"):
        return outcome
    raise HTTPException(404, detail=outcome.get("error", ""))


@router.get("/inbox/{peer_id}")
async def inbox(peer_id: str, request: Request):
    """Fetch pending messages for a peer."""
    return {"messages": _get_broker(request).get_inbox(peer_id)}


@router.post("/inbox/{peer_id}/read")
async def mark_read(peer_id: str, req: ReadRequest, request: Request):
    """Acknowledge messages so they are not delivered again."""
    return _get_broker(request).mark_read(peer_id, req.message_ids)
142
+
143
+
144
@router.get("/state")
async def state_all(request: Request):
    """Dump the full shared key/value state."""
    return {"state": _get_broker(request).get_state()}


@router.post("/state")
async def state_set(req: StateSetRequest, request: Request):
    """Write one shared-state key; the key must be non-empty."""
    mesh = _get_broker(request)
    if not req.key:
        raise HTTPException(400, detail="key required")
    return mesh.set_state(req.key, req.value, req.set_by)


@router.get("/state/{key}")
async def state_get(key: str, request: Request):
    """Read one shared-state key; 404 when it is absent."""
    entry = _get_broker(request).get_state_key(key)
    if entry is None:
        raise HTTPException(404, detail="key not found")
    return entry
165
+
166
+
167
@router.post("/lock")
async def lock(req: LockRequest, request: Request):
    """Acquire, release, or query an advisory file lock."""
    mesh = _get_broker(request)
    if not (req.file_path and req.locked_by):
        raise HTTPException(400, detail="file_path and locked_by required")
    if req.action not in ("acquire", "release", "query"):
        raise HTTPException(400, detail="action must be acquire, release, or query")
    return mesh.lock_action(req.file_path, req.locked_by, req.action)


@router.get("/events")
async def events(request: Request):
    """Return recent mesh events."""
    return {"events": _get_broker(request).get_events()}


@router.get("/status")
async def status(request: Request):
    """Return overall broker status."""
    return _get_broker(request).get_status()