superlocalmemory 3.0.29 → 3.0.31

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -16,11 +16,13 @@ SuperLocalMemory V3 - Intelligent local memory system for AI coding assistants.
16
16
 
17
17
  ---
18
18
 
19
- ## [3.0.29] - 2026-03-21
19
+ ## [3.0.31] - 2026-03-21
20
20
 
21
21
  ### Fixed
22
+ - Profile switching and display use correct identifiers
22
23
  - Profile sync across CLI, Dashboard, and MCP — all entry points now see the same profiles
23
24
  - Profile switching now persists correctly across restarts
25
+ - Resolve circular import in server module loading
24
26
 
25
27
  ---
26
28
 
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "superlocalmemory",
3
- "version": "3.0.29",
3
+ "version": "3.0.31",
4
4
  "description": "Information-geometric agent memory with mathematical guarantees. 4-channel retrieval, Fisher-Rao similarity, zero-LLM mode, EU AI Act compliant. Works with Claude, Cursor, Windsurf, and 17+ AI tools.",
5
5
  "keywords": [
6
6
  "ai-memory",
package/pyproject.toml CHANGED
@@ -1,6 +1,6 @@
1
1
  [project]
2
2
  name = "superlocalmemory"
3
- version = "3.0.29"
3
+ version = "3.0.31"
4
4
  description = "Information-geometric agent memory with mathematical guarantees"
5
5
  readme = "README.md"
6
6
  license = {text = "MIT"}
@@ -23,6 +23,9 @@ dependencies = [
23
23
  "rank-bm25>=0.2.2",
24
24
  "vadersentiment>=3.3.2",
25
25
  "einops>=0.8.2",
26
+ "fastapi[all]>=0.135.1",
27
+ "uvicorn>=0.42.0",
28
+ "websockets>=16.0",
26
29
  ]
27
30
 
28
31
  [project.optional-dependencies]
@@ -23,7 +23,7 @@ from pydantic import BaseModel
23
23
  import uvicorn
24
24
 
25
25
  from superlocalmemory.server.security_middleware import SecurityHeadersMiddleware
26
- from superlocalmemory.server.ui import SLM_VERSION
26
+ from superlocalmemory.server.routes.helpers import SLM_VERSION
27
27
 
28
28
  logger = logging.getLogger("superlocalmemory.api_server")
29
29
 
@@ -16,6 +16,42 @@ from typing import Optional
16
16
  from fastapi import HTTPException
17
17
  from pydantic import BaseModel, Field
18
18
 
19
+
20
+ # ---------------------------------------------------------------------------
21
+ # Version detection (shared — avoids circular import between ui.py ↔ v3_api.py)
22
+ # ---------------------------------------------------------------------------
23
+
24
+ def _get_version() -> str:
25
+ """Read version from package.json / pyproject.toml / importlib."""
26
+ try:
27
+ import json as _json
28
+ pkg_root = Path(__file__).resolve().parent.parent.parent.parent
29
+ pkg_json = pkg_root / "package.json"
30
+ if pkg_json.exists():
31
+ with open(pkg_json) as f:
32
+ v = _json.load(f).get("version", "")
33
+ if v:
34
+ return v
35
+ except Exception:
36
+ pass
37
+ try:
38
+ import tomllib
39
+ toml_path = Path(__file__).resolve().parent.parent.parent.parent / "pyproject.toml"
40
+ if toml_path.exists():
41
+ with open(toml_path, "rb") as f:
42
+ return tomllib.load(f)["project"]["version"]
43
+ except Exception:
44
+ pass
45
+ try:
46
+ from importlib.metadata import version
47
+ return version("superlocalmemory")
48
+ except Exception:
49
+ pass
50
+ return "unknown"
51
+
52
+
53
+ SLM_VERSION = _get_version()
54
+
19
55
  # V3 paths (migrated from ~/.claude-memory to ~/.superlocalmemory)
20
56
  MEMORY_DIR = Path.home() / ".superlocalmemory"
21
57
  DB_PATH = MEMORY_DIR / "memory.db"
@@ -120,28 +156,29 @@ def ensure_profile_in_json(name: str, description: str = "") -> None:
120
156
  def sync_profiles() -> list[dict]:
121
157
  """Reconcile SQLite and profiles.json. Returns merged profile list.
122
158
 
123
- SQLite is the source of truth. Any profile in profiles.json
124
- that's missing from SQLite is added to SQLite. Any profile in
125
- SQLite that's missing from profiles.json is added to profiles.json.
159
+ SQLite is the source of truth. Uses ``profile_id`` (not ``name``)
160
+ as the canonical key because profile_id is the PK referenced by
161
+ every FK in the database.
126
162
  """
127
163
  db_profiles = _get_db_profiles()
128
164
  json_config = _load_profiles_json()
129
165
  json_profiles = json_config.get('profiles', {})
130
166
 
131
- db_names = {p['name'] for p in db_profiles}
132
- json_names = set(json_profiles.keys())
167
+ # profile_id is the canonical key (PK in SQLite, FK target everywhere)
168
+ db_ids = {p['profile_id'] for p in db_profiles}
169
+ json_keys = set(json_profiles.keys())
133
170
 
134
171
  changed = False
135
172
 
136
173
  # JSON-only → add to SQLite (fixes Dashboard-created profiles)
137
- for name in json_names - db_names:
138
- ensure_profile_in_db(name, json_profiles[name].get('description', ''))
174
+ for key in json_keys - db_ids:
175
+ ensure_profile_in_db(key, json_profiles[key].get('description', ''))
139
176
 
140
177
  # SQLite-only → add to profiles.json (fixes CLI-created profiles)
141
- for name in db_names - json_names:
142
- db_entry = next(p for p in db_profiles if p['name'] == name)
143
- json_profiles[name] = {
144
- 'name': name,
178
+ for pid in db_ids - json_keys:
179
+ db_entry = next(p for p in db_profiles if p['profile_id'] == pid)
180
+ json_profiles[pid] = {
181
+ 'name': pid,
145
182
  'description': db_entry.get('description', ''),
146
183
  'created_at': db_entry.get('created_at', ''),
147
184
  'last_used': db_entry.get('last_used'),
@@ -61,15 +61,16 @@ async def list_profiles():
61
61
 
62
62
  profiles = []
63
63
  for p in merged:
64
- name = p.get('name', p.get('profile_id', ''))
65
- count = _get_memory_count(name)
64
+ # profile_id is the canonical key (PK, FK target, used by engine)
65
+ pid = p.get('profile_id', p.get('name', ''))
66
+ count = _get_memory_count(pid)
66
67
  profiles.append({
67
- "name": name,
68
+ "name": pid,
68
69
  "description": p.get('description', ''),
69
70
  "memory_count": count,
70
71
  "created_at": p.get('created_at', ''),
71
72
  "last_used": p.get('last_used', ''),
72
- "is_active": name == active,
73
+ "is_active": pid == active,
73
74
  })
74
75
 
75
76
  return {
@@ -90,10 +91,10 @@ async def switch_profile(name: str):
90
91
  raise HTTPException(status_code=400, detail="Invalid profile name.")
91
92
 
92
93
  merged = sync_profiles()
93
- merged_names = {p.get('name', p.get('profile_id', '')) for p in merged}
94
+ merged_ids = {p.get('profile_id', p.get('name', '')) for p in merged}
94
95
 
95
- if name not in merged_names:
96
- available = ', '.join(sorted(merged_names))
96
+ if name not in merged_ids:
97
+ available = ', '.join(sorted(merged_ids))
97
98
  raise HTTPException(
98
99
  status_code=404,
99
100
  detail=f"Profile '{name}' not found. Available: {available}",
@@ -139,8 +140,8 @@ async def create_profile(body: ProfileSwitch):
139
140
 
140
141
  # Check both stores for duplicates
141
142
  merged = sync_profiles()
142
- merged_names = {p.get('name', p.get('profile_id', '')) for p in merged}
143
- if name in merged_names:
143
+ merged_ids = {p.get('profile_id', p.get('name', '')) for p in merged}
144
+ if name in merged_ids:
144
145
  raise HTTPException(status_code=409, detail=f"Profile '{name}' already exists")
145
146
 
146
147
  # Write to BOTH stores atomically
@@ -164,8 +165,8 @@ async def delete_profile(name: str):
164
165
  raise HTTPException(status_code=400, detail="Cannot delete 'default' profile")
165
166
 
166
167
  merged = sync_profiles()
167
- merged_names = {p.get('name', p.get('profile_id', '')) for p in merged}
168
- if name not in merged_names:
168
+ merged_ids = {p.get('profile_id', p.get('name', '')) for p in merged}
169
+ if name not in merged_ids:
169
170
  raise HTTPException(status_code=404, detail=f"Profile '{name}' not found")
170
171
 
171
172
  json_config = _load_profiles_json()
@@ -12,7 +12,7 @@ import os
12
12
  from pathlib import Path
13
13
  from fastapi import APIRouter, Request
14
14
  from fastapi.responses import JSONResponse
15
- from superlocalmemory.server.ui import SLM_VERSION
15
+ from superlocalmemory.server.routes.helpers import SLM_VERSION
16
16
 
17
17
  logger = logging.getLogger(__name__)
18
18
 
@@ -25,39 +25,7 @@ from datetime import datetime
25
25
  logger = logging.getLogger(__name__)
26
26
 
27
27
 
28
- def _get_version() -> str:
29
- """Read version from package.json (npm), pyproject.toml, or metadata."""
30
- import json as _json
31
- pkg_root = Path(__file__).resolve().parent.parent.parent.parent
32
- # 1. Try package.json FIRST (source of truth for npm installs)
33
- try:
34
- pkg_json = pkg_root / "package.json"
35
- if pkg_json.exists():
36
- with open(pkg_json) as f:
37
- v = _json.load(f).get("version", "")
38
- if v:
39
- return v
40
- except Exception:
41
- pass
42
- # 2. Try pyproject.toml (source of truth for pip installs)
43
- try:
44
- import tomllib
45
- toml_path = pkg_root / "pyproject.toml"
46
- if toml_path.exists():
47
- with open(toml_path, "rb") as f:
48
- return tomllib.load(f)["project"]["version"]
49
- except Exception:
50
- pass
51
- # 3. Fallback to importlib.metadata
52
- try:
53
- from importlib.metadata import version
54
- return version("superlocalmemory")
55
- except Exception:
56
- pass
57
- return "unknown"
58
-
59
-
60
- SLM_VERSION = _get_version()
28
+ from superlocalmemory.server.routes.helpers import SLM_VERSION # noqa: E402
61
29
 
62
30
  _script_dir = str(Path(__file__).parent.resolve())
63
31
  sys.path = [p for p in sys.path if p not in ("", _script_dir)]