superlocalmemory 3.0.29 → 3.0.30

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -16,9 +16,10 @@ SuperLocalMemory V3 - Intelligent local memory system for AI coding assistants.
16
16
 
17
17
  ---
18
18
 
19
- ## [3.0.29] - 2026-03-21
19
+ ## [3.0.30] - 2026-03-21
20
20
 
21
21
  ### Fixed
22
+ - Profile switching and display now use the correct identifiers
22
23
  - Profile sync across CLI, Dashboard, and MCP — all entry points now see the same profiles
23
24
  - Profile switching now persists correctly across restarts
24
25
 
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "superlocalmemory",
3
- "version": "3.0.29",
3
+ "version": "3.0.30",
4
4
  "description": "Information-geometric agent memory with mathematical guarantees. 4-channel retrieval, Fisher-Rao similarity, zero-LLM mode, EU AI Act compliant. Works with Claude, Cursor, Windsurf, and 17+ AI tools.",
5
5
  "keywords": [
6
6
  "ai-memory",
package/pyproject.toml CHANGED
@@ -1,6 +1,6 @@
1
1
  [project]
2
2
  name = "superlocalmemory"
3
- version = "3.0.29"
3
+ version = "3.0.30"
4
4
  description = "Information-geometric agent memory with mathematical guarantees"
5
5
  readme = "README.md"
6
6
  license = {text = "MIT"}
@@ -23,6 +23,9 @@ dependencies = [
23
23
  "rank-bm25>=0.2.2",
24
24
  "vadersentiment>=3.3.2",
25
25
  "einops>=0.8.2",
26
+ "fastapi[all]>=0.135.1",
27
+ "uvicorn>=0.42.0",
28
+ "websockets>=16.0",
26
29
  ]
27
30
 
28
31
  [project.optional-dependencies]
@@ -120,28 +120,29 @@ def ensure_profile_in_json(name: str, description: str = "") -> None:
120
120
  def sync_profiles() -> list[dict]:
121
121
  """Reconcile SQLite and profiles.json. Returns merged profile list.
122
122
 
123
- SQLite is the source of truth. Any profile in profiles.json
124
- that's missing from SQLite is added to SQLite. Any profile in
125
- SQLite that's missing from profiles.json is added to profiles.json.
123
+ SQLite is the source of truth. Uses ``profile_id`` (not ``name``)
124
+ as the canonical key because profile_id is the PK referenced by
125
+ every FK in the database.
126
126
  """
127
127
  db_profiles = _get_db_profiles()
128
128
  json_config = _load_profiles_json()
129
129
  json_profiles = json_config.get('profiles', {})
130
130
 
131
- db_names = {p['name'] for p in db_profiles}
132
- json_names = set(json_profiles.keys())
131
+ # profile_id is the canonical key (PK in SQLite, FK target everywhere)
132
+ db_ids = {p['profile_id'] for p in db_profiles}
133
+ json_keys = set(json_profiles.keys())
133
134
 
134
135
  changed = False
135
136
 
136
137
  # JSON-only → add to SQLite (fixes Dashboard-created profiles)
137
- for name in json_names - db_names:
138
- ensure_profile_in_db(name, json_profiles[name].get('description', ''))
138
+ for key in json_keys - db_ids:
139
+ ensure_profile_in_db(key, json_profiles[key].get('description', ''))
139
140
 
140
141
  # SQLite-only → add to profiles.json (fixes CLI-created profiles)
141
- for name in db_names - json_names:
142
- db_entry = next(p for p in db_profiles if p['name'] == name)
143
- json_profiles[name] = {
144
- 'name': name,
142
+ for pid in db_ids - json_keys:
143
+ db_entry = next(p for p in db_profiles if p['profile_id'] == pid)
144
+ json_profiles[pid] = {
145
+ 'name': pid,
145
146
  'description': db_entry.get('description', ''),
146
147
  'created_at': db_entry.get('created_at', ''),
147
148
  'last_used': db_entry.get('last_used'),
@@ -61,15 +61,16 @@ async def list_profiles():
61
61
 
62
62
  profiles = []
63
63
  for p in merged:
64
- name = p.get('name', p.get('profile_id', ''))
65
- count = _get_memory_count(name)
64
+ # profile_id is the canonical key (PK, FK target, used by engine)
65
+ pid = p.get('profile_id', p.get('name', ''))
66
+ count = _get_memory_count(pid)
66
67
  profiles.append({
67
- "name": name,
68
+ "name": pid,
68
69
  "description": p.get('description', ''),
69
70
  "memory_count": count,
70
71
  "created_at": p.get('created_at', ''),
71
72
  "last_used": p.get('last_used', ''),
72
- "is_active": name == active,
73
+ "is_active": pid == active,
73
74
  })
74
75
 
75
76
  return {
@@ -90,10 +91,10 @@ async def switch_profile(name: str):
90
91
  raise HTTPException(status_code=400, detail="Invalid profile name.")
91
92
 
92
93
  merged = sync_profiles()
93
- merged_names = {p.get('name', p.get('profile_id', '')) for p in merged}
94
+ merged_ids = {p.get('profile_id', p.get('name', '')) for p in merged}
94
95
 
95
- if name not in merged_names:
96
- available = ', '.join(sorted(merged_names))
96
+ if name not in merged_ids:
97
+ available = ', '.join(sorted(merged_ids))
97
98
  raise HTTPException(
98
99
  status_code=404,
99
100
  detail=f"Profile '{name}' not found. Available: {available}",
@@ -139,8 +140,8 @@ async def create_profile(body: ProfileSwitch):
139
140
 
140
141
  # Check both stores for duplicates
141
142
  merged = sync_profiles()
142
- merged_names = {p.get('name', p.get('profile_id', '')) for p in merged}
143
- if name in merged_names:
143
+ merged_ids = {p.get('profile_id', p.get('name', '')) for p in merged}
144
+ if name in merged_ids:
144
145
  raise HTTPException(status_code=409, detail=f"Profile '{name}' already exists")
145
146
 
146
147
  # Write to BOTH stores atomically
@@ -164,8 +165,8 @@ async def delete_profile(name: str):
164
165
  raise HTTPException(status_code=400, detail="Cannot delete 'default' profile")
165
166
 
166
167
  merged = sync_profiles()
167
- merged_names = {p.get('name', p.get('profile_id', '')) for p in merged}
168
- if name not in merged_names:
168
+ merged_ids = {p.get('profile_id', p.get('name', '')) for p in merged}
169
+ if name not in merged_ids:
169
170
  raise HTTPException(status_code=404, detail=f"Profile '{name}' not found")
170
171
 
171
172
  json_config = _load_profiles_json()